hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 958k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72fbad2adffd95d82e895b59a2b7ca1499c137d | 193 | py | Python | python/interpret-core/interpret/greybox/__init__.py | prateekiiest/interpret | b5530a587251a77516ab443037fc37f71708564c | [
"MIT"
] | 2,674 | 2019-10-03T14:14:35.000Z | 2022-03-31T13:40:49.000Z | python/interpret-core/interpret/greybox/__init__.py | prateekiiest/interpret | b5530a587251a77516ab443037fc37f71708564c | [
"MIT"
] | 257 | 2019-11-08T19:22:56.000Z | 2022-03-29T20:09:07.000Z | python/interpret-core/interpret/greybox/__init__.py | prateekiiest/interpret | b5530a587251a77516ab443037fc37f71708564c | [
"MIT"
] | 367 | 2019-10-31T15:33:21.000Z | 2022-03-31T13:40:50.000Z | # Copyright (c) 2019 Microsoft Corporation
# Distributed under the MIT software license
from .treeinterpreter import TreeInterpreter # noqa: F401
from .shaptree import ShapTree # noqa: F401
| 32.166667 | 58 | 0.792746 |
from .treeinterpreter import TreeInterpreter
from .shaptree import ShapTree
| true | true |
f72fbadd2f5f8ca5d3e6c9fc5dc5627d098e888f | 5,496 | py | Python | source/generate_symbolic_derivatives.py | daccordeon/CEonlyPony | 7af50792a3a28101391397fce1e2b5e01d919701 | [
"BSD-3-Clause"
] | null | null | null | source/generate_symbolic_derivatives.py | daccordeon/CEonlyPony | 7af50792a3a28101391397fce1e2b5e01d919701 | [
"BSD-3-Clause"
] | null | null | null | source/generate_symbolic_derivatives.py | daccordeon/CEonlyPony | 7af50792a3a28101391397fce1e2b5e01d919701 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
"""Generate symbolic derivatives as lambdified functions for gwbench.
When run as a script: generate all symbolic derivatives for tf2_tidal at all standard locations ahead of benchmarking.
Slurm gets upset when multiple tasks try to create the derivatives if there aren't any there already, so run in series.
Usage:
$ python3 generate_symbolic_derivatives.py
License:
BSD 3-Clause License
Copyright (c) 2022, James Gardner.
All rights reserved except for those for the gwbench code which remain reserved
by S. Borhanian; the gwbench code is included in this repository for convenience.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from typing import List, Set, Dict, Tuple, Optional, Union
import os
from gwbench import wf_class as wfc
from gwbench import detector_response_derivatives as drd
def generate_symbolic_derivatives(
    wf_model_name: str,
    wf_other_var_dic: Optional[Dict[str, str]],
    deriv_symbs_string: str,
    locs: List[str],
    use_rot: bool,
    output_path: Optional[str] = None,
    print_progress: bool = True,
) -> None:
    """Generate symbolic derivatives, from generate_lambdified_functions.py from gwbench.

    Use network's wf_model_name, wf_other_var_dic, deriv_symbs_string, and use_rot.
    Will print 'Done.' when finished unless all files already exist in which it will print as such.

    Args:
        wf_model_name: Waveform model name.
        wf_other_var_dic: Waveform approximant.
        deriv_symbs_string: Symbols to take derivatives wrt.
        locs: Detector locations.
        use_rot: Whether to account for Earth's rotation.
        output_path: Output file path.
        print_progress: Whether to print progress.
    """
    # Derivative files are cached on disk; regeneration is only triggered
    # when at least one expected file is missing.
    var_tag = deriv_symbs_string.replace(" ", "_")
    expected_files = [
        f"par_deriv_WFM_{wf_model_name}_VAR_{var_tag}_DET_{loc}.dat"
        for loc in locs
    ]
    # The plus/cross polarisation file drops the extrinsic angles
    # (ra, dec, psi) from the variable tag.
    pl_cr_tag = (
        deriv_symbs_string.replace(" ra", "")
        .replace(" dec", "")
        .replace(" psi", "")
        .replace(" ", "_")
    )
    expected_files.append(
        f"par_deriv_WFM_{wf_model_name}_VAR_{pl_cr_tag}_DET_pl_cr.dat"
    )
    path = "lambdified_functions/"
    missing = [
        file_name
        for file_name in expected_files
        if not os.path.isfile(path + file_name)
    ]
    if missing:
        # If any file doesn't exist, generate them all again.
        # TODO: make this more efficient and just generate the missing files,
        # or, do it in parallel.
        wf = wfc.Waveform(wf_model_name, wf_other_var_dic)
        # Lambdified detector responses and their derivatives.
        drd.generate_det_responses_derivs_sym(
            wf,
            deriv_symbs_string,
            locs=locs,
            use_rot=use_rot,
            user_lambdified_functions_path=output_path,
        )
    elif print_progress:
        print("All lambdified derivatives already exist.")
if __name__ == "__main__":
    # Generate all symbolic derivatives ahead of benchmarking. This is run
    # in series because parallel Slurm tasks race with each other when the
    # derivative files do not exist yet (see module docstring).
    # tf2_tidal is used as a replacement for numerical BNS simulations until they become well-conditioned
    # TODO: make a user input file somewhere to unify the considered waveforms
    wf_model_name, wf_other_var_dic = "tf2_tidal", None
    # Differentiation variables: chirp mass, symmetric mass ratio, luminosity
    # distance, coalescence time/phase, inclination, and sky location/polarisation.
    deriv_symbs_string = "Mc eta DL tc phic iota ra dec psi"
    # TODO: make this automated by using a locs list from networks.py
    locs = ["H", "L", "V", "K", "I", "ET1", "ET2", "ET3", "C", "N", "S"]
    use_rot = True
    generate_symbolic_derivatives(
        wf_model_name,
        wf_other_var_dic,
        deriv_symbs_string,
        locs,
        use_rot,
        print_progress=False,
    )
| 39.539568 | 119 | 0.691776 |
from typing import List, Set, Dict, Tuple, Optional, Union
import os
from gwbench import wf_class as wfc
from gwbench import detector_response_derivatives as drd
def generate_symbolic_derivatives(
wf_model_name: str,
wf_other_var_dic: Optional[Dict[str, str]],
deriv_symbs_string: str,
locs: List[str],
use_rot: bool,
output_path: Optional[str] = None,
print_progress: bool = True,
) -> None:
= [
"par_deriv_WFM_"
+ wf_model_name
+ "_VAR_"
+ deriv_symbs_string.replace(" ", "_")
+ "_DET_"
+ key
+ ".dat"
for key in locs
]
file_names.append(
"par_deriv_WFM_"
+ wf_model_name
+ "_VAR_"
+ deriv_symbs_string.replace(" ra", "")
.replace(" dec", "")
.replace(" psi", "")
.replace(" ", "_")
+ "_DET_"
+ "pl_cr"
+ ".dat"
)
path = "lambdified_functions/"
file_names_existing = [
file_name for file_name in file_names if os.path.isfile(path + file_name)
]
if len(file_names_existing) < len(file_names):
# TODO: make this more efficient and just generate the missing files, or, do it in parallel
# waveform
wf = wfc.Waveform(wf_model_name, wf_other_var_dic)
# lambidified detector reponses and derivatives
drd.generate_det_responses_derivs_sym(
wf,
deriv_symbs_string,
locs=locs,
use_rot=use_rot,
user_lambdified_functions_path=output_path,
)
elif print_progress:
print("All lambdified derivatives already exist.")
if __name__ == "__main__":
# tf2_tidal is used as a replacement for numerical BNS simulations until they become well-conditioned
# TODO: make a user input file somewhere to unify the considered waveforms
wf_model_name, wf_other_var_dic = "tf2_tidal", None
deriv_symbs_string = "Mc eta DL tc phic iota ra dec psi"
# TODO: make this automated by using a locs list from networks.py
locs = ["H", "L", "V", "K", "I", "ET1", "ET2", "ET3", "C", "N", "S"]
use_rot = True
generate_symbolic_derivatives(
wf_model_name,
wf_other_var_dic,
deriv_symbs_string,
locs,
use_rot,
print_progress=False,
)
| true | true |
f72fbb46da1ac696d933485cf3bec183189f023a | 1,181 | py | Python | setup.py | i008/neptune-contrib | 4071c44112da4d7c52ee42cbb1ba937a66e5845b | [
"MIT"
] | null | null | null | setup.py | i008/neptune-contrib | 4071c44112da4d7c52ee42cbb1ba937a66e5845b | [
"MIT"
] | null | null | null | setup.py | i008/neptune-contrib | 4071c44112da4d7c52ee42cbb1ba937a66e5845b | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
def main():
    """Build and register the neptune-contrib distribution with setuptools."""
    # Optional dependency groups, installable as `neptune-contrib[<group>]`.
    extras = {
        'bots': ['python-telegram-bot'],
        'hpo': ['scikit-optimize==0.5.2', 'scipy'],
        'monitoring': ['scikit-optimize==0.5.2', 'sacred==0.7.5', 'scikit-learn==0.21.3',
                       'scikit-plot==0.3.7', 'seaborn==0.8.1', 'aif360==0.2.1'],
        'versioning': ['boto3', 'numpy'],
        'viz': ['altair==2.3.0'],
    }
    # The 'all' extra is the concatenation of every group above, preserving
    # group insertion order.
    extras['all'] = [dep for group_deps in extras.values() for dep in group_deps]
    # Dependencies required for a base (no-extras) install.
    base_libs = ['attrdict==2.0.0', 'neptune-client', 'joblib==0.13', 'pandas', 'matplotlib', 'Pillow==5.4.1']
    setup(
        name='neptune-contrib',
        version='0.13.7',
        description='Neptune Python library contributions',
        author='neptune.ml',
        author_email='contact@neptune.ml',
        url="https://github.com/neptune-ml/neptune-contrib",
        long_description='Neptune Python library contributions',
        license='MIT License',
        install_requires=base_libs,
        extras_require=extras,
        packages=find_packages(include=['neptunecontrib*']),
    )


if __name__ == "__main__":
    main()
| 31.078947 | 110 | 0.580017 | from setuptools import find_packages, setup
def main():
extras = {
'bots': ['python-telegram-bot'],
'hpo': ['scikit-optimize==0.5.2', 'scipy'],
'monitoring': ['scikit-optimize==0.5.2', 'sacred==0.7.5', 'scikit-learn==0.21.3',
'scikit-plot==0.3.7', 'seaborn==0.8.1', 'aif360==0.2.1'],
'versioning': ['boto3', 'numpy'],
'viz': ['altair==2.3.0'],
}
all_deps = []
for group_name in extras:
all_deps += extras[group_name]
extras['all'] = all_deps
base_libs = ['attrdict==2.0.0', 'neptune-client', 'joblib==0.13', 'pandas', 'matplotlib', 'Pillow==5.4.1']
setup(
name='neptune-contrib',
version='0.13.7',
description='Neptune Python library contributions',
author='neptune.ml',
author_email='contact@neptune.ml',
url="https://github.com/neptune-ml/neptune-contrib",
long_description='Neptune Python library contributions',
license='MIT License',
install_requires=base_libs,
extras_require=extras,
packages=find_packages(include=['neptunecontrib*']),
)
if __name__ == "__main__":
main()
| true | true |
f72fbc9ef2815a7c16260374b2af5e47dc631fe1 | 5,099 | py | Python | dev_scripts/chemenv/equivalent_indices.py | frssp/pymatgen | bdd977f065b66191557c7398b31a1571bc541fdb | [
"MIT"
] | 5 | 2019-04-11T20:57:38.000Z | 2021-12-01T05:00:42.000Z | dev_scripts/chemenv/equivalent_indices.py | darnoceloc/pymatgen | 5cc42912a12a265a603df7e34c856561f76edc1f | [
"MIT"
] | 3 | 2017-07-18T01:13:41.000Z | 2019-04-29T18:17:30.000Z | dev_scripts/chemenv/equivalent_indices.py | darnoceloc/pymatgen | 5cc42912a12a265a603df7e34c856561f76edc1f | [
"MIT"
] | 3 | 2019-10-14T19:47:34.000Z | 2020-07-02T08:10:45.000Z | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
Development script of the ChemEnv utility to get the equivalent indices of the model coordination environments
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "david.waroquiers@gmail.com"
__date__ = "Feb 20, 2016"
import numpy as np
def build_equivalent_indices(cg_symbol):
    """Return the symmetry-equivalent vertex orderings for a model environment.

    Args:
        cg_symbol: Coordination geometry symbol; one of 'O:6' (octahedron),
            'PB:7' (pentagonal bipyramid), 'HB:8' (hexagonal bipyramid),
            'SBT:8' (square-face bicapped trigonal prism) or 'SA:8'
            (square antiprism).

    Returns:
        List of index permutations (lists of ints), each describing one
        equivalent ordering of the environment's points. An unknown symbol
        yields an empty list.
    """
    equiv_list = []
    # O:6
    if cg_symbol == 'O:6':
        opposite_points = {0: 1,
                           1: 0,
                           2: 3,
                           3: 2,
                           4: 5,
                           5: 4}
        perp_plane = {0: [2, 3, 4, 5],
                      1: [2, 3, 4, 5],
                      2: [0, 1, 4, 5],
                      3: [0, 1, 4, 5],
                      4: [0, 1, 2, 3],
                      5: [0, 1, 2, 3]}
        # 0. any point
        for i0 in range(6):
            # 1. point opposite to point 0.
            i1 = opposite_points[i0]
            # 2. one of the 4 points in the perpendicular plane
            for i2 in perp_plane[i0]:
                # 3. point opposite to point 2.
                i3 = opposite_points[i2]
                # list() is required: in Python 3, range objects have no
                # remove() (the original relied on Python 2 list semantics).
                remaining = list(range(6))
                remaining.remove(i0)
                remaining.remove(i1)
                remaining.remove(i2)
                remaining.remove(i3)
                # 4. one of the 2 remaining points
                for i4 in remaining:
                    # 5. point opposite to point 4.
                    i5 = opposite_points[i4]
                    equiv_list.append([i0, i1, i2, i3, i4, i5])
    # PB:7
    if cg_symbol == 'PB:7':
        # Walk the pentagonal ring in both directions from any start, then
        # choose either apex first.
        for i0 in range(5):
            for turn in [1, -1]:
                i1 = np.mod(i0 + turn, 5)
                i2 = np.mod(i1 + turn, 5)
                i3 = np.mod(i2 + turn, 5)
                i4 = np.mod(i3 + turn, 5)
                for i5 in [5, 6]:
                    i6 = 5 if i5 == 6 else 6
                    equiv_list.append([i0, i1, i2, i3, i4, i5, i6])
    # HB:8
    if cg_symbol == 'HB:8':
        # Walk the hexagonal ring in both directions, then choose either apex.
        for i0 in range(6):
            for turn in [1, -1]:
                i1 = np.mod(i0 + turn, 6)
                i2 = np.mod(i1 + turn, 6)
                i3 = np.mod(i2 + turn, 6)
                i4 = np.mod(i3 + turn, 6)
                i5 = np.mod(i4 + turn, 6)
                for i6 in [6, 7]:
                    i7 = 6 if i6 == 7 else 7
                    equiv_list.append([i0, i1, i2, i3, i4, i5, i6, i7])
    # SBT:8
    if cg_symbol == 'SBT:8':
        # 0. any point on the square face without cap
        for i0 in [0, 1, 3, 4]:
            # 1. point in this square face but also in the triangular plane of point 0
            # 2. last point in the triangular plane of point 0
            if i0 < 3:
                i1 = 0 if i0 == 1 else 1
                i2 = 2
            else:
                i1 = 3 if i0 == 4 else 4
                i2 = 5
            # 3.4.5. corresponding points in the opposite triangular plane to the one of points 0.1.2.
            i3 = np.mod(i0 + 3, 6)
            i4 = np.mod(i1 + 3, 6)
            i5 = np.mod(i2 + 3, 6)
            # 6. cap point opposite to the first point
            i6 = 7 if i0 in [1, 4] else 6
            # 7. last cap point
            i7 = 6 if i0 in [1, 4] else 7
            equiv_list.append([i0, i1, i2, i3, i4, i5, i6, i7])
    # SA:8
    if cg_symbol == 'SA:8':
        sf1 = [0, 2, 1, 3]
        sf2 = [4, 5, 7, 6]
        # 0. any point
        for i0 in range(8):
            # 1. point opposite to point 0. in the square face
            if i0 in [0, 2]:
                i1 = i0 + 1
            elif i0 in [1, 3]:
                i1 = i0 - 1
            elif i0 == 4:
                i1 = 7
            elif i0 == 5:
                i1 = 6
            elif i0 == 6:
                i1 = 5
            elif i0 == 7:
                i1 = 4
            # 2. one of the two last points in the square face
            sfleft = list(sf1) if i0 in sf1 else list(sf2)
            sfleft.remove(i0)
            sfleft.remove(i1)
            for i2 in sfleft:
                sfleft2 = list(sfleft)
                sfleft2.remove(i2)
                # 3. last point in the square face
                i3 = sfleft2[0]
                # 4. point opposite to point 3. and closest to point 0.
                i4 = 0
                # NOTE(review): the assignments below immediately overwrite
                # i3 and i4 computed just above; this looks like a copy-paste
                # from the SBT:8 branch and is almost certainly wrong for a
                # square antiprism. Kept as-is to preserve existing behaviour
                # -- TODO confirm the intended SA:8 geometry and fix.
                i3 = np.mod(i0 + 3, 6)
                i4 = np.mod(i1 + 3, 6)
                i5 = np.mod(i2 + 3, 6)
                i6 = 7 if i0 in [1, 4] else 6
                i7 = 6 if i0 in [1, 4] else 7
                equiv_list.append([i0, i1, i2, i3, i4, i5, i6, i7])
    return equiv_list


if __name__ == '__main__':
    cg_symbol = 'O:6'
    equiv_list = build_equivalent_indices(cg_symbol)
    print('Equivalent indices ({:d}) for {} : '.format(len(equiv_list), cg_symbol))
    print(equiv_list)
from __future__ import division, unicode_literals
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "david.waroquiers@gmail.com"
__date__ = "Feb 20, 2016"
import numpy as np
if __name__ == '__main__':
cg_symbol = 'O:6'
equiv_list = []
if cg_symbol == 'O:6':
opposite_points = {0: 1,
1: 0,
2: 3,
3: 2,
4: 5,
5: 4}
perp_plane = {0: [2, 3, 4, 5],
1: [2, 3, 4, 5],
2: [0, 1, 4, 5],
3: [0, 1, 4, 5],
4: [0, 1, 2, 3],
5: [0, 1, 2, 3]}
for i0 in range(6):
i1 = opposite_points[i0]
for i2 in perp_plane[i0]:
i3 = opposite_points[i2]
remaining = range(6)
remaining.remove(i0)
remaining.remove(i1)
remaining.remove(i2)
remaining.remove(i3)
for i4 in remaining:
i5 = opposite_points[i4]
equiv_list.append([i0, i1, i2, i3, i4, i5])
if cg_symbol == 'PB:7':
for i0 in range(5):
for turn in [1, -1]:
i1 = np.mod(i0+turn, 5)
i2 = np.mod(i1+turn, 5)
i3 = np.mod(i2+turn, 5)
i4 = np.mod(i3+turn, 5)
for i5 in [5, 6]:
i6 = 5 if i5 == 6 else 6
equiv_list.append([i0, i1, i2, i3, i4, i5, i6])
if cg_symbol == 'HB:8':
for i0 in range(6):
for turn in [1, -1]:
i1 = np.mod(i0 + turn, 6)
i2 = np.mod(i1 + turn, 6)
i3 = np.mod(i2 + turn, 6)
i4 = np.mod(i3 + turn, 6)
i5 = np.mod(i4 + turn, 6)
for i6 in [6, 7]:
i7 = 6 if i6 == 7 else 7
equiv_list.append([i0, i1, i2, i3, i4, i5, i6, i7])
if cg_symbol == 'SBT:8':
for i0 in [0, 1, 3, 4]:
if i0 < 3:
i1 = 0 if i0 == 1 else 1
i2 = 2
else:
i1 = 3 if i0 == 4 else 4
i2 = 5
i3 = np.mod(i0 + 3, 6)
i4 = np.mod(i1 + 3, 6)
i5 = np.mod(i2 + 3, 6)
i6 = 7 if i0 in [1, 4] else 6
i7 = 6 if i0 in [1, 4] else 7
equiv_list.append([i0, i1, i2, i3, i4, i5, i6, i7])
if cg_symbol == 'SA:8':
sf1 = [0, 2, 1, 3]
sf2 = [4, 5, 7, 6]
for i0 in range(8):
if i0 in [0, 2]:
i1 = i0 + 1
elif i0 in [1, 3]:
i1 = i0 - 1
elif i0 == 4:
i1 = 7
elif i0 == 5:
i1 = 6
elif i0 == 6:
i1 = 5
elif i0 == 7:
i1 = 4
sfleft = list(sf1) if i0 in sf1 else list(sf2)
sfleft.remove(i0)
sfleft.remove(i1)
for i2 in sfleft:
sfleft2 = list(sfleft)
sfleft2.remove(i2)
i3 = sfleft2[0]
i4 = 0
i3 = np.mod(i0 + 3, 6)
i4 = np.mod(i1 + 3, 6)
i5 = np.mod(i2 + 3, 6)
i6 = 7 if i0 in [1, 4] else 6
i7 = 6 if i0 in [1, 4] else 7
equiv_list.append([i0, i1, i2, i3, i4, i5, i6, i7])
print('Equivalent indices ({:d}) for {} : '.format(len(equiv_list), cg_symbol))
print(equiv_list) | true | true |
f72fbcceb7f7342d732b521612c2db620aa6ae77 | 15,134 | py | Python | neutron/agent/l3/extensions/qos/fip.py | netsec/neutron | 17f90e17f139dc47eaafa1d3e342eb87ff0f61ed | [
"Apache-2.0"
] | null | null | null | neutron/agent/l3/extensions/qos/fip.py | netsec/neutron | 17f90e17f139dc47eaafa1d3e342eb87ff0f61ed | [
"Apache-2.0"
] | null | null | null | neutron/agent/l3/extensions/qos/fip.py | netsec/neutron | 17f90e17f139dc47eaafa1d3e342eb87ff0f61ed | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.agent import l3_extension
from neutron_lib import constants
from neutron_lib.services.qos import constants as qos_consts
from oslo_concurrency import lockutils
from oslo_log import log as logging
from neutron.agent.l3.extensions.qos import base as qos_base
from neutron.agent.linux import ip_lib
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.callbacks import resources
from neutron.api.rpc.handlers import resources_rpc
from neutron.common import coordination
LOG = logging.getLogger(__name__)
class RouterFipRateLimitMaps(qos_base.RateLimitMaps):
    """Cache of per-router floating IPs and per-FIP TC rate limits.

    All accessors serialize on a shared lock (``self.lock_name``) taken via
    ``lockutils.synchronized`` on inner closures.
    """
    # Name of the coordination lock guarding every cache in this map.
    LOCK_NAME = "fip-qos-cache"
    def __init__(self):
        """Initialize RouterFipRateLimitMaps
        The router_floating_ips will be:
        router_floating_ips = {
            router_id_1: set(fip1, fip2),
            router_id_2: set(), # default
        }
        """
        self.router_floating_ips = {}
        """
        The rate limits dict will be:
        xxx_ratelimits = {
            fip_1: (rate, burst),
            fip_2: (IP_DEFAULT_RATE, IP_DEFAULT_BURST), # default
            fip_3: (1, 2),
            fip_4: (3, 4),
        }
        """
        self.ingress_ratelimits = {}
        self.egress_ratelimits = {}
        super(RouterFipRateLimitMaps, self).__init__(self.LOCK_NAME)
    def find_fip_router_id(self, fip):
        """Return the router ID whose cached FIP set contains ``fip``.

        Returns None when no router is known to host ``fip``.
        """
        @lockutils.synchronized(self.lock_name)
        def _find_fip_router_id():
            for router_id, ips in self.router_floating_ips.items():
                if fip in ips:
                    return router_id
        return _find_fip_router_id()
    def get_router_floating_ips(self, router_id):
        """Pop and return the cached floating IP set for ``router_id``.

        Note this removes the entry from the cache; an empty list is
        returned when the router has no cached entry.
        """
        @lockutils.synchronized(self.lock_name)
        def _get_router_floating_ips():
            return self.router_floating_ips.pop(
                router_id, [])
        return _get_router_floating_ips()
    def remove_fip_ratelimit_cache(self, direction, fip):
        """Drop the cached (rate, burst) of ``fip`` for ``direction``.

        ``direction`` selects the ``ingress_ratelimits`` or
        ``egress_ratelimits`` attribute by name.
        """
        @lockutils.synchronized(self.lock_name)
        def _remove_fip_ratelimit_cache():
            rate_limits_direction = direction + "_ratelimits"
            rate_limits = getattr(self, rate_limits_direction, {})
            rate_limits.pop(fip, None)
        _remove_fip_ratelimit_cache()
    def set_fip_ratelimit_cache(self, direction, fip, rate, burst):
        """Cache (rate, burst) for ``fip`` in the given ``direction``."""
        @lockutils.synchronized(self.lock_name)
        def _set_fip_ratelimit_cache():
            rate_limits_direction = direction + "_ratelimits"
            rate_limits = getattr(self, rate_limits_direction, {})
            rate_limits[fip] = (rate, burst)
        _set_fip_ratelimit_cache()
    def get_fip_ratelimit_cache(self, direction, fip):
        """Return the cached (rate, burst) for ``fip`` in ``direction``.

        Falls back to (IP_DEFAULT_RATE, IP_DEFAULT_BURST), i.e. "no limit",
        when the FIP has no cached entry.
        """
        @lockutils.synchronized(self.lock_name)
        def _get_fip_ratelimit_cache():
            rate_limits_direction = direction + "_ratelimits"
            rate_limits = getattr(self, rate_limits_direction, {})
            rate, burst = rate_limits.get(fip, (qos_base.IP_DEFAULT_RATE,
                                                qos_base.IP_DEFAULT_BURST))
            return rate, burst
        return _get_fip_ratelimit_cache()
class FipQosAgentExtension(qos_base.L3QosAgentExtensionBase,
                           l3_extension.L3AgentExtension):
    """L3 agent extension applying QoS rate limits to floating IPs via TC."""
    def initialize(self, connection, driver_type):
        """Initialize agent extension."""
        self.resource_rpc = resources_rpc.ResourcesPullRpcApi()
        self.fip_qos_map = RouterFipRateLimitMaps()
        self._register_rpc_consumers()
    def _handle_notification(self, context, resource_type,
                             qos_policies, event_type):
        # Only policy updates are acted on; other event types are ignored.
        if event_type == events.UPDATED:
            for qos_policy in qos_policies:
                self._process_update_policy(qos_policy)
    def _process_update_policy(self, qos_policy):
        """Re-apply TC rules for every FIP bound to an updated QoS policy."""
        old_qos_policy = self.fip_qos_map.get_policy(qos_policy.id)
        if old_qos_policy:
            if self._policy_rules_modified(old_qos_policy, qos_policy):
                for fip in self.fip_qos_map.get_resources(qos_policy):
                    router_id = self.fip_qos_map.find_fip_router_id(fip)
                    router_info = self._get_router_info(router_id)
                    if not router_info:
                        continue
                    device = self._get_rate_limit_ip_device(router_info)
                    dvr_fip_device = self._get_dvr_fip_device(router_info)
                    if not device and not dvr_fip_device:
                        LOG.debug("Router %s does not have a floating IP "
                                  "related device, skipping.", router_id)
                        continue
                    rates = self.get_policy_rates(qos_policy)
                    if device:
                        self.process_ip_rates(fip, device, rates)
                    if dvr_fip_device:
                        # DVR qrouter-namespace device: no cache consulted,
                        # rules are installed unconditionally.
                        self.process_ip_rates(
                            fip, dvr_fip_device, rates, with_cache=False)
            self.fip_qos_map.update_policy(qos_policy)
    def _remove_fip_rate_limit_cache(self, fip):
        """Drop cached rate limits for ``fip`` in both directions."""
        for direction in constants.VALID_DIRECTIONS:
            self.fip_qos_map.remove_fip_ratelimit_cache(direction, fip)
    def _process_reset_fip(self, fip):
        """Forget any policy association cached for ``fip``."""
        self.fip_qos_map.clean_by_resource(fip)
    @coordination.synchronized('qos-floating-ip-{ip}')
    def process_ip_rate_limit(self, ip, direction,
                              device, rate, burst):
        """Set (or clear, at default values) the TC rate limit for one IP."""
        tc_wrapper = self._get_tc_wrapper(device)
        if (rate == qos_base.IP_DEFAULT_RATE and
                burst == qos_base.IP_DEFAULT_BURST):
            # According to the agreements of default value definition,
            # floating IP bandwidth was changed to default value (no limit).
            # NOTE: l3_tc_lib will ignore exception FilterIDForIPNotFound.
            tc_wrapper.clear_ip_rate_limit(direction, ip)
            self.fip_qos_map.remove_fip_ratelimit_cache(direction, ip)
            return
        # Finally just set it, l3_tc_lib will clean the old rules if exists.
        tc_wrapper.set_ip_rate_limit(direction, ip, rate, burst)
    def _get_rate_limit_ip_device(self, router_info):
        """Return the external-facing IPDevice to attach TC filters to.

        Returns None when the router has no gateway port, or when this host
        is not the SNAT host for a DVR edge router, or when no external
        interface name can be determined.
        """
        ex_gw_port = router_info.get_ex_gw_port()
        if not ex_gw_port:
            return
        agent_mode = router_info.agent_conf.agent_mode
        is_distributed_router = router_info.router.get('distributed')
        if is_distributed_router and agent_mode == (
                constants.L3_AGENT_MODE_DVR_SNAT):
            # DVR edge (or DVR edge ha) router
            if not router_info._is_this_snat_host():
                return
            name = router_info.get_snat_external_device_interface_name(
                ex_gw_port)
        else:
            # DVR local router
            # Legacy/HA router
            name = router_info.get_external_device_interface_name(ex_gw_port)
        if not name:
            # DVR local router in dvr_no_external agent mode may not have
            # such rfp-device.
            return
        namespace = router_info.get_gw_ns_name()
        return ip_lib.IPDevice(name, namespace=namespace)
    def _remove_fip_rate_limit(self, device, fip_ip):
        """Clear TC filters (if the device exists) and cache for ``fip_ip``."""
        tc_wrapper = self._get_tc_wrapper(device)
        for direction in constants.VALID_DIRECTIONS:
            if device.exists():
                tc_wrapper.clear_ip_rate_limit(direction, fip_ip)
            self.fip_qos_map.remove_fip_ratelimit_cache(direction, fip_ip)
    def get_fip_qos_rates(self, context, fip, policy_id):
        """Return {direction: {rate, burst}} for ``fip``'s QoS policy.

        A None ``policy_id`` resets the FIP's cached association and returns
        the default (no-limit) rates for both directions; otherwise the
        policy is pulled over RPC and cached against the FIP.
        """
        if policy_id is None:
            self._process_reset_fip(fip)
            # process_ip_rate_limit will treat value 0 as
            # cleaning the tc filters if exits or no action.
            return {constants.INGRESS_DIRECTION: {
                        "rate": qos_base.IP_DEFAULT_RATE,
                        "burst": qos_base.IP_DEFAULT_BURST},
                    constants.EGRESS_DIRECTION: {
                        "rate": qos_base.IP_DEFAULT_RATE,
                        "burst": qos_base.IP_DEFAULT_BURST}}
        policy = self.resource_rpc.pull(
            context, resources.QOS_POLICY, policy_id)
        self.fip_qos_map.set_resource_policy(fip, policy)
        return self.get_policy_rates(policy)
    def process_ip_rates(self, fip, device, rates, with_cache=True):
        """Apply per-direction rates to ``device`` for one floating IP.

        With ``with_cache`` the cached (rate, burst) is consulted first so
        unchanged limits are skipped; without it the TC rules are written
        (or cleared, at default values) unconditionally.
        """
        for direction in constants.VALID_DIRECTIONS:
            rate = rates.get(direction)
            if with_cache:
                old_rate, old_burst = self.fip_qos_map.get_fip_ratelimit_cache(
                    direction, fip)
                if old_rate == rate['rate'] and old_burst == rate['burst']:
                    # Two possibilities here:
                    # 1. Floating IP rate limit does not change.
                    # 2. Floating IP bandwidth does not limit.
                    continue
                self.process_ip_rate_limit(
                    fip, direction, device,
                    rate['rate'], rate['burst'])
                self.fip_qos_map.set_fip_ratelimit_cache(
                    direction, fip, rate['rate'], rate['burst'])
            else:
                tc_wrapper = self._get_tc_wrapper(device)
                if (rate['rate'] == qos_base.IP_DEFAULT_RATE and
                        rate['burst'] == qos_base.IP_DEFAULT_BURST):
                    # Default value is no limit
                    tc_wrapper.clear_ip_rate_limit(direction, fip)
                else:
                    tc_wrapper.set_ip_rate_limit(direction, fip,
                                                 rate['rate'], rate['burst'])
    def _get_dvr_fip_device(self, router_info):
        """Return the rfp-device in the DVR qrouter-namespace, if applicable.

        Only DVR routers hosted by a dvr_snat agent with a gateway port, a
        FIP namespace and an existing router namespace yield a device;
        otherwise None is returned implicitly.
        """
        is_distributed_router = router_info.router.get('distributed')
        agent_mode = router_info.agent_conf.agent_mode
        if is_distributed_router and agent_mode == (
                constants.L3_AGENT_MODE_DVR_SNAT):
            gw_port = router_info.get_ex_gw_port()
            if gw_port and router_info.fip_ns:
                rfp_dev_name = router_info.get_external_device_interface_name(
                    gw_port)
                if router_info.router_namespace.exists() and rfp_dev_name:
                    return ip_lib.IPDevice(
                        rfp_dev_name, namespace=router_info.ns_name)
    def process_floating_ip_addresses(self, context, router_info):
        """Reconcile TC rate-limit rules with the router's floating IPs."""
        # Loop all the router floating ips, the corresponding floating IP tc
        # rules will be configured:
        # 1. for legacy and HA router, it will be all floating IPs to qg-device
        # of qrouter-namespace in (all ha router hosted) network node.
        # 2. for dvr router, we can do this simple. No matter the agent
        # type is dvr or dvr_snat, we can just set all the
        # floating IP tc rules to the corresponding device:
        # 2.1 for dvr local router in compute node:
        # the namespace is qrouter-x, and the device is rfp-device.
        # 2.2 for dvr edge (ha) router in network node:
        # the namespace is snat-x, and the device is qg-device.
        # 3. for dvr local router, if agent_mod is dvr_no_external, no
        # floating IP rules will be configured.
        # 4. for dvr router in snat node, we should process the floating
        # IP QoS again in qrouter-namespace to cover the mixed deployment
        # with nova-compute scenario.
        is_distributed_router = router_info.router.get('distributed')
        agent_mode = router_info.agent_conf.agent_mode
        LOG.debug("Start processing floating IP QoS for "
                  "router %(router_id)s, router "
                  "distributed: %(distributed)s, "
                  "agent mode: %(agent_mode)s",
                  {"router_id": router_info.router_id,
                   "distributed": is_distributed_router,
                   "agent_mode": agent_mode})
        if is_distributed_router and agent_mode == (
                constants.L3_AGENT_MODE_DVR_NO_EXTERNAL):
            # condition 3: dvr local router and dvr_no_external agent
            return
        device = self._get_rate_limit_ip_device(router_info)
        dvr_fip_device = self._get_dvr_fip_device(router_info)
        if not device and not dvr_fip_device:
            LOG.debug("No relevant QoS device found "
                      "for router: %s", router_info.router_id)
            return
        floating_ips = (router_info.get_floating_ips() +
                        router_info.get_port_forwarding_fips())
        current_fips = self.fip_qos_map.router_floating_ips.get(
            router_info.router_id, set())
        new_fips = set()
        for fip in floating_ips:
            fip_addr = fip['floating_ip_address']
            new_fips.add(fip_addr)
            rates = self.get_fip_qos_rates(context,
                                           fip_addr,
                                           fip.get(qos_consts.QOS_POLICY_ID))
            if device:
                self.process_ip_rates(fip_addr, device, rates)
            if dvr_fip_device:
                # NOTE(liuyulong): for scenario 4 (mixed dvr_snat and compute
                # node), because floating IP qos rates may have been
                # processed in dvr snat-namespace, so here the cache was
                # already set. We just install the rules to the device in
                # qrouter-namesapce.
                self.process_ip_rates(
                    fip_addr, dvr_fip_device, rates, with_cache=False)
        self.fip_qos_map.router_floating_ips[router_info.router_id] = new_fips
        # FIPs seen last time but not anymore: tear down their TC rules.
        fips_removed = current_fips - new_fips
        for fip in fips_removed:
            if device:
                self._remove_fip_rate_limit(device, fip)
            if dvr_fip_device:
                self._remove_fip_rate_limit(dvr_fip_device, fip)
            self._process_reset_fip(fip)
    def add_router(self, context, data):
        """Handle router-added events by applying FIP QoS rules."""
        router_info = self._get_router_info(data['id'])
        if router_info:
            self.process_floating_ip_addresses(context, router_info)
    def update_router(self, context, data):
        """Handle router-updated events by re-applying FIP QoS rules."""
        router_info = self._get_router_info(data['id'])
        if router_info:
            self.process_floating_ip_addresses(context, router_info)
    def delete_router(self, context, data):
        # NOTE(liuyulong): to delete the router, you need to disassociate the
        # floating IP first, so the update_router has done the cache clean.
        pass
    def ha_state_change(self, context, data):
        # No action required on HA state transitions for FIP QoS.
        pass
| 43.24 | 79 | 0.619466 |
from neutron_lib.agent import l3_extension
from neutron_lib import constants
from neutron_lib.services.qos import constants as qos_consts
from oslo_concurrency import lockutils
from oslo_log import log as logging
from neutron.agent.l3.extensions.qos import base as qos_base
from neutron.agent.linux import ip_lib
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.callbacks import resources
from neutron.api.rpc.handlers import resources_rpc
from neutron.common import coordination
LOG = logging.getLogger(__name__)
class RouterFipRateLimitMaps(qos_base.RateLimitMaps):
    """Lock-protected caches of floating IP QoS state.

    Tracks which floating IPs each router hosts, plus the (rate, burst)
    pair last applied per floating IP, one cache per traffic direction.
    All access goes through a per-instance oslo lock.
    """

    LOCK_NAME = "fip-qos-cache"

    def __init__(self):
        # router_id -> iterable of floating IP addresses on that router
        self.router_floating_ips = {}
        # floating IP -> (rate, burst) per traffic direction
        self.ingress_ratelimits = {}
        self.egress_ratelimits = {}
        super(RouterFipRateLimitMaps, self).__init__(self.LOCK_NAME)

    def find_fip_router_id(self, fip):
        """Return the ID of the router hosting *fip*, or None."""
        @lockutils.synchronized(self.lock_name)
        def _locked_search():
            for rtr_id, addresses in self.router_floating_ips.items():
                if fip in addresses:
                    return rtr_id
            return None
        return _locked_search()

    def get_router_floating_ips(self, router_id):
        """Pop and return the cached floating IPs for *router_id*.

        Note this is destructive: the router's entry is removed.
        """
        @lockutils.synchronized(self.lock_name)
        def _locked_pop():
            return self.router_floating_ips.pop(router_id, [])
        return _locked_pop()

    def remove_fip_ratelimit_cache(self, direction, fip):
        """Drop the cached (rate, burst) for *fip* in *direction*."""
        @lockutils.synchronized(self.lock_name)
        def _locked_remove():
            cache = getattr(self, "%s_ratelimits" % direction, {})
            cache.pop(fip, None)
        _locked_remove()

    def set_fip_ratelimit_cache(self, direction, fip, rate, burst):
        """Record (rate, burst) as applied for *fip* in *direction*."""
        @lockutils.synchronized(self.lock_name)
        def _locked_set():
            cache = getattr(self, "%s_ratelimits" % direction, {})
            cache[fip] = (rate, burst)
        _locked_set()

    def get_fip_ratelimit_cache(self, direction, fip):
        """Return the cached (rate, burst) for *fip*, defaults if absent."""
        @lockutils.synchronized(self.lock_name)
        def _locked_get():
            cache = getattr(self, "%s_ratelimits" % direction, {})
            return cache.get(fip, (qos_base.IP_DEFAULT_RATE,
                                   qos_base.IP_DEFAULT_BURST))
        return _locked_get()
class FipQosAgentExtension(qos_base.L3QosAgentExtensionBase,
                           l3_extension.L3AgentExtension):
    """L3 agent extension applying QoS rate limits to floating IPs.

    Reacts to router add/update events and to QoS policy UPDATED
    notifications by (re)programming per-IP traffic-control rate limits
    on the router's external devices, caching the applied (rate, burst)
    values in a RouterFipRateLimitMaps instance.
    """

    def initialize(self, connection, driver_type):
        """Set up the RPC pull client, the rate-limit cache and consumers."""
        self.resource_rpc = resources_rpc.ResourcesPullRpcApi()
        self.fip_qos_map = RouterFipRateLimitMaps()
        self._register_rpc_consumers()

    def _handle_notification(self, context, resource_type,
                             qos_policies, event_type):
        """Re-apply rate limits for each policy in an UPDATED notification."""
        if event_type == events.UPDATED:
            for qos_policy in qos_policies:
                self._process_update_policy(qos_policy)

    def _process_update_policy(self, qos_policy):
        """Reprogram every floating IP bound to a policy whose rules changed.

        Only acts when the policy is already cached; the cached copy is
        refreshed afterwards.
        """
        old_qos_policy = self.fip_qos_map.get_policy(qos_policy.id)
        if old_qos_policy:
            if self._policy_rules_modified(old_qos_policy, qos_policy):
                for fip in self.fip_qos_map.get_resources(qos_policy):
                    router_id = self.fip_qos_map.find_fip_router_id(fip)
                    router_info = self._get_router_info(router_id)
                    if not router_info:
                        continue
                    device = self._get_rate_limit_ip_device(router_info)
                    dvr_fip_device = self._get_dvr_fip_device(router_info)
                    if not device and not dvr_fip_device:
                        LOG.debug("Router %s does not have a floating IP "
                                  "related device, skipping.", router_id)
                        continue
                    rates = self.get_policy_rates(qos_policy)
                    if device:
                        self.process_ip_rates(fip, device, rates)
                    if dvr_fip_device:
                        # The DVR fip device is not tracked in the cache.
                        self.process_ip_rates(
                            fip, dvr_fip_device, rates, with_cache=False)
            self.fip_qos_map.update_policy(qos_policy)

    def _remove_fip_rate_limit_cache(self, fip):
        """Drop cached (rate, burst) entries for *fip* in both directions."""
        for direction in constants.VALID_DIRECTIONS:
            self.fip_qos_map.remove_fip_ratelimit_cache(direction, fip)

    def _process_reset_fip(self, fip):
        """Forget all cached state (policy binding etc.) for *fip*."""
        self.fip_qos_map.clean_by_resource(fip)

    @coordination.synchronized('qos-floating-ip-{ip}')
    def process_ip_rate_limit(self, ip, direction,
                              device, rate, burst):
        """Program (or clear, at default values) the tc limit for one IP.

        Serialized per floating IP by the coordination lock, so concurrent
        router/policy events cannot interleave tc changes for the same IP.
        """
        tc_wrapper = self._get_tc_wrapper(device)
        if (rate == qos_base.IP_DEFAULT_RATE and
                burst == qos_base.IP_DEFAULT_BURST):
            # Default values mean "no limit": remove any existing filter
            # and the cache entry instead of installing one.
            tc_wrapper.clear_ip_rate_limit(direction, ip)
            self.fip_qos_map.remove_fip_ratelimit_cache(direction, ip)
            return
        tc_wrapper.set_ip_rate_limit(direction, ip, rate, burst)

    def _get_rate_limit_ip_device(self, router_info):
        """Return the external IPDevice to program, or None if unavailable.

        For distributed routers on a dvr_snat agent the snat external
        device is used (only on the hosting snat node); otherwise the
        regular gateway device in the router's gateway namespace.
        """
        ex_gw_port = router_info.get_ex_gw_port()
        if not ex_gw_port:
            return
        agent_mode = router_info.agent_conf.agent_mode
        is_distributed_router = router_info.router.get('distributed')
        if is_distributed_router and agent_mode == (
                constants.L3_AGENT_MODE_DVR_SNAT):
            if not router_info._is_this_snat_host():
                return
            name = router_info.get_snat_external_device_interface_name(
                ex_gw_port)
        else:
            name = router_info.get_external_device_interface_name(ex_gw_port)
        if not name:
            return
        namespace = router_info.get_gw_ns_name()
        return ip_lib.IPDevice(name, namespace=namespace)

    def _remove_fip_rate_limit(self, device, fip_ip):
        """Clear tc limits for *fip_ip* on *device* and drop cache entries."""
        tc_wrapper = self._get_tc_wrapper(device)
        for direction in constants.VALID_DIRECTIONS:
            if device.exists():
                tc_wrapper.clear_ip_rate_limit(direction, fip_ip)
            # The cache is cleaned even when the device is already gone.
            self.fip_qos_map.remove_fip_ratelimit_cache(direction, fip_ip)

    def get_fip_qos_rates(self, context, fip, policy_id):
        """Return {direction: {"rate": ..., "burst": ...}} for *fip*.

        A missing policy resets the cached state for the IP and yields
        default (unlimited) values; otherwise the policy is pulled over
        RPC and cached against the IP.
        """
        if policy_id is None:
            self._process_reset_fip(fip)
            return {constants.INGRESS_DIRECTION: {
                        "rate": qos_base.IP_DEFAULT_RATE,
                        "burst": qos_base.IP_DEFAULT_BURST},
                    constants.EGRESS_DIRECTION: {
                        "rate": qos_base.IP_DEFAULT_RATE,
                        "burst": qos_base.IP_DEFAULT_BURST}}
        policy = self.resource_rpc.pull(
            context, resources.QOS_POLICY, policy_id)
        self.fip_qos_map.set_resource_policy(fip, policy)
        return self.get_policy_rates(policy)

    def process_ip_rates(self, fip, device, rates, with_cache=True):
        """Apply *rates* for *fip* on *device* in both directions.

        With the cache enabled, unchanged (rate, burst) pairs are skipped
        and newly applied values are recorded; without it, tc is
        programmed unconditionally (used for DVR fip devices).
        """
        for direction in constants.VALID_DIRECTIONS:
            rate = rates.get(direction)
            if with_cache:
                old_rate, old_burst = self.fip_qos_map.get_fip_ratelimit_cache(
                    direction, fip)
                if old_rate == rate['rate'] and old_burst == rate['burst']:
                    continue
                self.process_ip_rate_limit(
                    fip, direction, device,
                    rate['rate'], rate['burst'])
                self.fip_qos_map.set_fip_ratelimit_cache(
                    direction, fip, rate['rate'], rate['burst'])
            else:
                tc_wrapper = self._get_tc_wrapper(device)
                if (rate['rate'] == qos_base.IP_DEFAULT_RATE and
                        rate['burst'] == qos_base.IP_DEFAULT_BURST):
                    tc_wrapper.clear_ip_rate_limit(direction, fip)
                else:
                    tc_wrapper.set_ip_rate_limit(direction, fip,
                                                 rate['rate'], rate['burst'])

    def _get_dvr_fip_device(self, router_info):
        """Return the DVR external IPDevice for a dvr_snat agent, else None.

        NOTE(review): only L3_AGENT_MODE_DVR_SNAT agents match here;
        confirm whether plain DVR (compute-node) agents should also be
        handled by this path.
        """
        is_distributed_router = router_info.router.get('distributed')
        agent_mode = router_info.agent_conf.agent_mode
        if is_distributed_router and agent_mode == (
                constants.L3_AGENT_MODE_DVR_SNAT):
            gw_port = router_info.get_ex_gw_port()
            if gw_port and router_info.fip_ns:
                rfp_dev_name = router_info.get_external_device_interface_name(
                    gw_port)
                if router_info.router_namespace.exists() and rfp_dev_name:
                    return ip_lib.IPDevice(
                        rfp_dev_name, namespace=router_info.ns_name)

    def process_floating_ip_addresses(self, context, router_info):
        """Sync tc rate limits with the router's current floating IPs.

        Programs limits for every present floating IP (including port
        forwarding FIPs) and clears limits/caches for IPs removed since
        the last run for this router.
        """
        is_distributed_router = router_info.router.get('distributed')
        agent_mode = router_info.agent_conf.agent_mode
        LOG.debug("Start processing floating IP QoS for "
                  "router %(router_id)s, router "
                  "distributed: %(distributed)s, "
                  "agent mode: %(agent_mode)s",
                  {"router_id": router_info.router_id,
                   "distributed": is_distributed_router,
                   "agent_mode": agent_mode})
        if is_distributed_router and agent_mode == (
                constants.L3_AGENT_MODE_DVR_NO_EXTERNAL):
            # This agent hosts no external devices for DVR routers.
            return
        device = self._get_rate_limit_ip_device(router_info)
        dvr_fip_device = self._get_dvr_fip_device(router_info)
        if not device and not dvr_fip_device:
            LOG.debug("No relevant QoS device found "
                      "for router: %s", router_info.router_id)
            return
        floating_ips = (router_info.get_floating_ips() +
                        router_info.get_port_forwarding_fips())
        current_fips = self.fip_qos_map.router_floating_ips.get(
            router_info.router_id, set())
        new_fips = set()
        for fip in floating_ips:
            fip_addr = fip['floating_ip_address']
            new_fips.add(fip_addr)
            rates = self.get_fip_qos_rates(context,
                                           fip_addr,
                                           fip.get(qos_consts.QOS_POLICY_ID))
            if device:
                self.process_ip_rates(fip_addr, device, rates)
            if dvr_fip_device:
                self.process_ip_rates(
                    fip_addr, dvr_fip_device, rates, with_cache=False)
        self.fip_qos_map.router_floating_ips[router_info.router_id] = new_fips
        # Anything previously cached but no longer present lost its
        # floating IP: clear tc filters and forget all cached state.
        fips_removed = current_fips - new_fips
        for fip in fips_removed:
            if device:
                self._remove_fip_rate_limit(device, fip)
            if dvr_fip_device:
                self._remove_fip_rate_limit(dvr_fip_device, fip)
            self._process_reset_fip(fip)

    def add_router(self, context, data):
        """Apply floating IP QoS for a newly added router, if known."""
        router_info = self._get_router_info(data['id'])
        if router_info:
            self.process_floating_ip_addresses(context, router_info)

    def update_router(self, context, data):
        """Re-sync floating IP QoS after a router update."""
        router_info = self._get_router_info(data['id'])
        if router_info:
            self.process_floating_ip_addresses(context, router_info)

    def delete_router(self, context, data):
        """No-op: FIP disassociation before deletion already cleaned up."""
        pass

    def ha_state_change(self, context, data):
        """No-op: this extension takes no action on HA state changes."""
        pass
| true | true |
f72fbced7d7530fbca0bf7e3bb8adb613862ba38 | 183 | py | Python | Euler/Problem_16.py | ChristensenCode/energy-viking | 7a720cbcfabcb020ed42d52462bfad4058b0c20f | [
"MIT"
] | null | null | null | Euler/Problem_16.py | ChristensenCode/energy-viking | 7a720cbcfabcb020ed42d52462bfad4058b0c20f | [
"MIT"
] | null | null | null | Euler/Problem_16.py | ChristensenCode/energy-viking | 7a720cbcfabcb020ed42d52462bfad4058b0c20f | [
"MIT"
] | null | null | null | # Problem 16 Power Digit Sum
# Project Euler 16 -- Power Digit Sum: sum the decimal digits of 2**1000.
# Improvements over the original: no C-style index loop or manual
# accumulator list, and 2**1000 is computed once instead of twice.


def digit_sum(value):
    """Return the sum of the decimal digits of the non-negative int *value*."""
    return sum(int(digit) for digit in str(value))


x = 2**1000
print(x)  # the full number, kept from the original script's output
print(digit_sum(x))
# Sum the decimal digits of 2**1000 (Project Euler problem 16).
x = 2**1000
print(x)
value = str(2**1000)
totalling = [int(ch) for ch in value]
print(sum(totalling))
f72fbf0efa739283e1861ea301b93db9d409887a | 1,174 | py | Python | neutron/db/migration/alembic_migrations/versions/1f71e54a85e7_ml2_net_seg_model.py | gampel/neutron | 51a6260266dc59c066072ca890ad9c40b1aad6cf | [
"Apache-2.0"
] | 10 | 2015-09-22T10:22:53.000Z | 2016-02-25T06:12:05.000Z | neutron/db/migration/alembic_migrations/versions/1f71e54a85e7_ml2_net_seg_model.py | gampel/neutron | 51a6260266dc59c066072ca890ad9c40b1aad6cf | [
"Apache-2.0"
] | 12 | 2015-01-08T18:30:45.000Z | 2015-03-13T21:04:15.000Z | neutron/db/migration/alembic_migrations/versions/1f71e54a85e7_ml2_net_seg_model.py | gampel/neutron | 51a6260266dc59c066072ca890ad9c40b1aad6cf | [
"Apache-2.0"
] | 7 | 2015-02-05T10:23:52.000Z | 2019-05-18T17:11:19.000Z | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""ml2_network_segments models change for multi-segment network.
Revision ID: 1f71e54a85e7
Revises: 44621190bc02
Create Date: 2014-10-15 18:30:51.395295
"""
# revision identifiers, used by Alembic.
revision = '1f71e54a85e7'
down_revision = '44621190bc02'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add ml2_network_segments.segment_index for multi-segment networks."""
    segment_index_col = sa.Column('segment_index', sa.Integer(),
                                  nullable=False, server_default='0')
    op.add_column('ml2_network_segments', segment_index_col)
def downgrade():
    """Drop the segment_index column added by upgrade()."""
    op.drop_column('ml2_network_segments', 'segment_index')
| 28.634146 | 78 | 0.721465 |
revision = '1f71e54a85e7'
down_revision = '44621190bc02'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add ml2_network_segments.segment_index for multi-segment networks.

    Existing rows get index 0 via the server-side default.
    """
    op.add_column('ml2_network_segments',
                  sa.Column('segment_index', sa.Integer(), nullable=False,
                            server_default='0'))
def downgrade():
    """Drop the segment_index column added by upgrade()."""
    op.drop_column('ml2_network_segments', 'segment_index')
| true | true |
f72fbf450177a71f28c55fbc60c587342b06a61a | 26,495 | py | Python | scikits/crab/recommenders/knn/classes.py | MostaSchoolOfAI/crab | 1c1fc21e902e4ee422ab367d691df16978972f8c | [
"BSD-3-Clause"
] | null | null | null | scikits/crab/recommenders/knn/classes.py | MostaSchoolOfAI/crab | 1c1fc21e902e4ee422ab367d691df16978972f8c | [
"BSD-3-Clause"
] | null | null | null | scikits/crab/recommenders/knn/classes.py | MostaSchoolOfAI/crab | 1c1fc21e902e4ee422ab367d691df16978972f8c | [
"BSD-3-Clause"
] | null | null | null | """
Generalized Recommender models.
This module contains basic memory recommender interfaces used throughout
the whole scikit-crab package.
The interfaces are realized as abstract base classes (i.e., some optional
functionality is provided in the interface itself, so that the interfaces
can be subclassed).
"""
# Author: Marcel Caraciolo <marcel@muricoca.com>
#
# License: BSD Style.
from sklearn.base import BaseEstimator
from .base import ItemRecommender, UserRecommender
from .item_strategies import ItemsNeighborhoodStrategy
from .neighborhood_strategies import NearestNeighborsStrategy
import numpy as np
class ItemBasedRecommender(ItemRecommender):
    """
    Item Based Collaborative Filtering Recommender.

    Parameters
    -----------
    data_model: The data model instance that will be data source
        for the recommender.

    similarity: The Item Similarity instance that will be used to
        score the items that will be recommended.

    items_selection_strategy: The item candidates strategy that you
        can choose for selecting the possible items to recommend.
        default = ItemsNeighborhoodStrategy

    capper: bool (default=True)
        Cap the preferences with maximum and minimum preferences
        in the model.

    with_preference: bool (default=False)
        Return the recommendations with the estimated preferences if True.

    Attributes
    -----------
    `model`: The data model instance that will be data source
        for the recommender.

    `similarity`: The Item Similarity instance that will be used to
        score the items that will be recommended.

    `items_selection_strategy`: The item candidates strategy that you
        can choose for selecting the possible items to recommend.
        default = ItemsNeighborhoodStrategy

    `capper`: bool (default=True)
        Cap the preferences with maximum and minimum preferences
        in the model.

    `with_preference`: bool (default=False)
        Return the recommendations with the estimated preferences if True.

    Examples
    -----------
    >>> from scikits.crab.models.classes import MatrixPreferenceDataModel
    >>> from scikits.crab.recommenders.knn.classes import ItemBasedRecommender
    >>> from scikits.crab.similarities.basic_similarities import ItemSimilarity
    >>> from scikits.crab.recommenders.knn.item_strategies import ItemsNeighborhoodStrategy
    >>> from scikits.crab.metrics.pairwise import euclidean_distances
    >>> movies = {'Marcel Caraciolo': {'Lady in the Water': 2.5, \
     'Snakes on a Plane': 3.5, \
    'Just My Luck': 3.0, 'Superman Returns': 3.5, 'You, Me and Dupree': 2.5, \
     'The Night Listener': 3.0}, \
     'Paola Pow': {'Lady in the Water': 3.0, 'Snakes on a Plane': 3.5, \
     'Just My Luck': 1.5, 'Superman Returns': 5.0, 'The Night Listener': 3.0, \
     'You, Me and Dupree': 3.5}, \
    'Leopoldo Pires': {'Lady in the Water': 2.5, 'Snakes on a Plane': 3.0, \
     'Superman Returns': 3.5, 'The Night Listener': 4.0}, \
    'Lorena Abreu': {'Snakes on a Plane': 3.5, 'Just My Luck': 3.0, \
     'The Night Listener': 4.5, 'Superman Returns': 4.0, \
     'You, Me and Dupree': 2.5}, \
    'Steve Gates': {'Lady in the Water': 3.0, 'Snakes on a Plane': 4.0, \
     'Just My Luck': 2.0, 'Superman Returns': 3.0, 'The Night Listener': 3.0, \
     'You, Me and Dupree': 2.0}, \
    'Sheldom': {'Lady in the Water': 3.0, 'Snakes on a Plane': 4.0, \
     'The Night Listener': 3.0, 'Superman Returns': 5.0, \
     'You, Me and Dupree': 3.5}, \
    'Penny Frewman': {'Snakes on a Plane':4.5,'You, Me and Dupree':1.0, \
     'Superman Returns':4.0}, \
    'Maria Gabriela': {}}
    >>> model = MatrixPreferenceDataModel(movies)
    >>> items_strategy = ItemsNeighborhoodStrategy()
    >>> similarity = ItemSimilarity(model, euclidean_distances)
    >>> recsys = ItemBasedRecommender(model, similarity, items_strategy)
    >>> #Return the recommendations for the given user.
    >>> recsys.recommend('Leopoldo Pires')
    ['Just My Luck', 'You, Me and Dupree']
    >>> #Return the 2 explanations for the given recommendation.
    >>> recsys.recommended_because('Leopoldo Pires', 'Just My Luck',2)
    ['The Night Listener', 'Superman Returns']

    Notes
    -----------
    This ItemBasedRecommender does not yet provide
    support for rescorer functions.

    References
    -----------
    Item-based collaborative filtering recommendation algorithms by Sarwar
    http://portal.acm.org/citation.cfm?id=372071

    """

    def __init__(self, model, similarity, items_selection_strategy=None,
                 capper=True, with_preference=False):
        ItemRecommender.__init__(self, model, with_preference)
        self.similarity = similarity
        self.capper = capper
        if items_selection_strategy is None:
            self.items_selection_strategy = ItemsNeighborhoodStrategy()
        else:
            self.items_selection_strategy = items_selection_strategy

    def recommend(self, user_id, how_many=None, **params):
        '''
        Return a list of recommended items, ordered from most strongly
        recommend to least.

        Parameters
        ----------
        user_id: int or string
                 User for which recommendations are to be computed.
        how_many: int
                 Desired number of recommendations (default=None ALL)

        '''
        # NOTE: sklearn.base.BaseEstimator's public API is set_params();
        # the former `_set_params` spelling was removed in scikit-learn
        # 0.10, and the sibling UserBasedRecommender already calls
        # set_params() -- this makes the two recommenders consistent.
        self.set_params(**params)

        candidate_items = self.all_other_items(user_id)

        recommendable_items = self._top_matches(user_id,
                                                candidate_items, how_many)

        return recommendable_items

    def estimate_preference(self, user_id, item_id, **params):
        '''
        Parameters
        ----------
        user_id: int or string
                 User for which recommendations are to be computed.

        item_id: int or string
                ID of item for which wants to find the estimated preference.

        Returns
        -------
        Return an estimated preference if the user has not expressed a
        preference for the item, or else the user's actual preference for the
        item. If a preference cannot be estimated, returns None.
        '''
        preference = self.model.preference_value(user_id, item_id)
        if not np.isnan(preference):
            return preference

        # TODO: It needs optimization
        prefs = self.model.preferences_from_user(user_id)

        if not self.model.has_preference_values():
            # Boolean models: treat every expressed preference as 1.0.
            prefs = [(pref, 1.0) for pref in prefs]

        similarities = \
            np.array([self.similarity.get_similarity(item_id, to_item_id)
                      for to_item_id, pref in prefs
                      if to_item_id != item_id]).flatten()

        prefs = np.array([pref for it, pref in prefs])
        prefs_sim = np.sum(prefs[~np.isnan(similarities)] *
                           similarities[~np.isnan(similarities)])
        # NOTE(review): np.sum over an array containing NaN yields NaN,
        # so any undefined similarity makes the whole estimate NaN --
        # confirm whether NaNs should be excluded from this sum too.
        total_similarity = np.sum(similarities)

        # Throw out the estimate if it was based on no data points,
        # of course, but also if based on just one. This is a bit of a
        # band-aid on the 'stock' item-based algorithm for the moment.
        # The reason is that in this case the estimate is, simply, the
        # user's rating for one item that happened to have a defined
        # similarity. The similarity score doesn't matter, and that
        # seems like a bad situation.
        if total_similarity == 0.0 or \
           not similarities[~np.isnan(similarities)].size:
            return np.nan

        estimated = prefs_sim / total_similarity

        if self.capper:
            # Clamp the estimate to the model's preference range.
            max_p = self.model.maximum_preference_value()
            min_p = self.model.minimum_preference_value()
            estimated = max_p if estimated > max_p else min_p \
                if estimated < min_p else estimated
        return estimated

    def all_other_items(self, user_id, **params):
        '''
        Parameters
        ----------
        user_id: int or string
                 User for which recommendations are to be computed.

        Returns
        ---------
        Return items in the `model` for which the user has not expressed
        the preference and could possibly be recommended to the user.

        '''
        return self.items_selection_strategy.candidate_items(user_id,
                                                             self.model)

    def _top_matches(self, source_id, target_ids, how_many=None, **params):
        '''
        Parameters
        ----------
        target_ids: array of shape [n_target_ids]

        source_id: int or string
                item id to compare against.

        how_many: int
            Desired number of most top items to recommend (default=None ALL)

        Returns
        --------
        Return the top N matches
        It can be user_ids or item_ids.
        '''
        # Empty target_ids
        if target_ids.size == 0:
            return np.array([])

        estimate_preferences = np.vectorize(self.estimate_preference)

        preferences = estimate_preferences(source_id, target_ids)

        # Drop targets whose preference could not be estimated.
        preference_values = preferences[~np.isnan(preferences)]
        target_ids = target_ids[~np.isnan(preferences)]

        sorted_preferences = np.lexsort((preference_values,))[::-1]

        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
            else sorted_preferences

        if self.with_preference:
            # NOTE(review): this indexes the unfiltered `preferences`
            # with indices into the filtered arrays; the pairs can be
            # misaligned when NaNs were dropped -- confirm intent.
            top_n_recs = [(target_ids[ind],
                           preferences[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [target_ids[ind]
                          for ind in sorted_preferences]

        return top_n_recs

    def most_similar_items(self, item_id, how_many=None):
        '''
        Return the most similar items to the given item, ordered
        from most similar to least.

        Parameters
        -----------
        item_id:  int or string
            ID of item for which to find most similar other items

        how_many: int
            Desired number of most similar items to find (default=None ALL)
        '''
        old_how_many = self.similarity.num_best
        # +1 since it returns the identity.
        self.similarity.num_best = how_many + 1 \
            if how_many is not None else None
        similarities = self.similarity[item_id]
        self.similarity.num_best = old_how_many
        return np.array([item for item, pref in similarities
                         if item != item_id and not np.isnan(pref)])

    def recommended_because(self, user_id, item_id, how_many=None, **params):
        '''
        Returns the items that were most influential in recommending a
        given item to a given user. In most implementations, this
        method will return items that the user prefers and that
        are similar to the given item.

        Parameters
        -----------
        user_id : int or string
            ID of the user who was recommended the item

        item_id: int or string
            ID of item that was recommended

        how_many: int
            Maximum number of items to return (default=None ALL)

        Returns
        ----------
        The list of items ordered from most influential in
        recommended the given item to least
        '''
        preferences = self.model.preferences_from_user(user_id)

        if self.model.has_preference_values():
            similarities = \
                np.array([self.similarity.get_similarity(item_id, to_item_id)
                          for to_item_id, pref in preferences
                          if to_item_id != item_id]).flatten()
            prefs = np.array([pref for it, pref in preferences])
            item_ids = np.array([it for it, pref in preferences])
        else:
            similarities = \
                np.array([self.similarity.get_similarity(item_id, to_item_id)
                          for to_item_id in preferences
                          if to_item_id != item_id]).flatten()
            prefs = np.array([1.0 for it in preferences])
            item_ids = np.array(preferences)

        # Score each rated item by its preference weighted by (1 + sim).
        scores = prefs[~np.isnan(similarities)] * \
            (1.0 + similarities[~np.isnan(similarities)])

        sorted_preferences = np.lexsort((scores,))[::-1]

        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
            else sorted_preferences

        if self.with_preference:
            top_n_recs = [(item_ids[ind],
                           prefs[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [item_ids[ind]
                          for ind in sorted_preferences]

        return top_n_recs
#=====================
#User Based Recommender
class UserBasedRecommender(UserRecommender):
    """
    User Based Collaborative Filtering Recommender.

    Parameters
    -----------
    data_model: The data model instance that will be data source
        for the recommender.

    similarity: The User Similarity instance that will be used to
        score the users that are the most similar to the user.

    neighborhood_strategy: The user neighborhood strategy that you
        can choose for selecting the most similar users to find
        the items to recommend.
        default = NearestNeighborsStrategy

    capper: bool (default=True)
        Cap the preferences with maximum and minimum preferences
        in the model.

    with_preference: bool (default=False)
        Return the recommendations with the estimated preferences if True.

    Attributes
    -----------
    `model`: The data model instance that will be data source
        for the recommender.

    `similarity`: The User Similarity instance that will be used to
        score the users that are the most similar to the user.

    `neighborhood_strategy`: The user neighborhood strategy that you
        can choose for selecting the most similar users to find
        the items to recommend.
        default = NearestNeighborsStrategy

    `capper`: bool (default=True)
        Cap the preferences with maximum and minimum preferences
        in the model.

    `with_preference`: bool (default=False)
        Return the recommendations with the estimated preferences if True.

    Examples
    -----------
    >>> from scikits.crab.models.classes import MatrixPreferenceDataModel
    >>> from scikits.crab.recommenders.knn.classes import UserBasedRecommender
    >>> from scikits.crab.similarities.basic_similarities import UserSimilarity
    >>> from scikits.crab.recommenders.knn.neighborhood_strategies import NearestNeighborsStrategy
    >>> from scikits.crab.metrics.pairwise import euclidean_distances
    >>> movies = {'Marcel Caraciolo': {'Lady in the Water': 2.5, \
     'Snakes on a Plane': 3.5, \
    'Just My Luck': 3.0, 'Superman Returns': 3.5, 'You, Me and Dupree': 2.5, \
     'The Night Listener': 3.0}, \
     'Paola Pow': {'Lady in the Water': 3.0, 'Snakes on a Plane': 3.5, \
     'Just My Luck': 1.5, 'Superman Returns': 5.0, 'The Night Listener': 3.0, \
     'You, Me and Dupree': 3.5}, \
    'Leopoldo Pires': {'Lady in the Water': 2.5, 'Snakes on a Plane': 3.0, \
     'Superman Returns': 3.5, 'The Night Listener': 4.0}, \
    'Lorena Abreu': {'Snakes on a Plane': 3.5, 'Just My Luck': 3.0, \
     'The Night Listener': 4.5, 'Superman Returns': 4.0, \
     'You, Me and Dupree': 2.5}, \
    'Steve Gates': {'Lady in the Water': 3.0, 'Snakes on a Plane': 4.0, \
     'Just My Luck': 2.0, 'Superman Returns': 3.0, 'The Night Listener': 3.0, \
     'You, Me and Dupree': 2.0}, \
    'Sheldom': {'Lady in the Water': 3.0, 'Snakes on a Plane': 4.0, \
     'The Night Listener': 3.0, 'Superman Returns': 5.0, \
     'You, Me and Dupree': 3.5}, \
    'Penny Frewman': {'Snakes on a Plane':4.5,'You, Me and Dupree':1.0, \
     'Superman Returns':4.0}, \
    'Maria Gabriela': {}}
    >>> model = MatrixPreferenceDataModel(movies)
    >>> nhood_strategy = NearestNeighborsStrategy()
    >>> similarity = UserSimilarity(model, euclidean_distances)
    >>> recsys = UserBasedRecommender(model, similarity, nhood_strategy)
    >>> #Return the recommendations for the given user.
    >>> recsys.recommend('Leopoldo Pires')
    ['Just My Luck', 'You, Me and Dupree']
    >>> #Return the 2 explanations for the given recommendation.
    >>> recsys.recommended_because('Leopoldo Pires', 'Just My Luck',2)
    ['Lorena Abreu', 'Marcel Caraciolo']

    Notes
    -----------
    This UserBasedRecommender does not yet provide
    support for rescorer functions.

    References
    -----------
    User-based collaborative filtering recommendation algorithms by

    """

    def __init__(self, model, similarity, neighborhood_strategy=None,
                 capper=True, with_preference=False):
        UserRecommender.__init__(self, model, with_preference)
        self.similarity = similarity
        self.capper = capper
        # Fall back to the default neighborhood strategy when none given.
        if neighborhood_strategy is None:
            self.neighborhood_strategy = NearestNeighborsStrategy()
        else:
            self.neighborhood_strategy = neighborhood_strategy

    def all_other_items(self, user_id, **params):
        '''
        Parameters
        ----------
        user_id: int or string
                 User for which recommendations are to be computed.
                 (default= 'user_similarity')

        Optional Parameters
        --------------------
        n_similarity: string
            The similarity used in the neighborhood strategy

        distance: the metrics.pairwise function to set.
                The pairwise function to compute the similarity
                (default = euclidean_distances)

        nhood_size: int
            The neighborhood size (default=None ALL)

        minimal_similarity: float
            minimal similarity required for neighbors (default = 0.0)

        sampling_rate: int
            percentage of users to consider when building neighborhood
            (default = 1)

        Returns
        ---------
        Return items in the `model` for which the user has not expressed
        the preference and could possibly be recommended to the user.

        '''
        n_similarity = params.pop('n_similarity', 'user_similarity')
        distance = params.pop('distance', self.similarity.distance)
        nhood_size = params.pop('nhood_size', None)

        nearest_neighbors = self.neighborhood_strategy.user_neighborhood(
            user_id,
            self.model, n_similarity, distance, nhood_size, **params)

        items_from_user_id = self.model.items_from_user(user_id)
        possible_items = []
        # Union of every neighbor's rated items ...
        for to_user_id in nearest_neighbors:
            possible_items.extend(self.model.items_from_user(to_user_id))

        possible_items = np.unique(np.array(possible_items).flatten())

        # ... minus what the user has already rated.
        return np.setdiff1d(possible_items, items_from_user_id)

    def estimate_preference(self, user_id, item_id, **params):
        '''
        Parameters
        ----------
        user_id: int or string
                 User for which recommendations are to be computed.

        item_id: int or string
                ID of item for which wants to find the estimated preference.

        Returns
        -------
        Return an estimated preference if the user has not expressed a
        preference for the item, or else the user's actual preference for the
        item. If a preference cannot be estimated, returns None.
        '''
        preference = self.model.preference_value(user_id, item_id)

        if not np.isnan(preference):
            return preference

        n_similarity = params.pop('n_similarity', 'user_similarity')
        distance = params.pop('distance', self.similarity.distance)
        nhood_size = params.pop('nhood_size', None)

        nearest_neighbors = self.neighborhood_strategy.user_neighborhood(
            user_id,
            self.model, n_similarity, distance, nhood_size, **params)

        preference = 0.0
        total_similarity = 0.0

        similarities = np.array(
            [self.similarity.get_similarity(user_id, to_user_id)
             for to_user_id in nearest_neighbors]).flatten()

        prefs = np.array([self.model.preference_value(to_user_id, item_id)
                          for to_user_id in nearest_neighbors])

        # NOTE(review): neighbors without a rating for item_id yield NaN
        # in `prefs`, which propagates into the weighted sum below. A
        # previous (removed) commented-out attempt filtered prefs by its
        # own NaN mask; confirm whether a combined valid mask is wanted.
        prefs_sim = np.sum(prefs[~np.isnan(similarities)] *
                           similarities[~np.isnan(similarities)])
        total_similarity = np.sum(similarities)

        # Throw out the estimate if it was based on no data points,
        # of course, but also if based on just one. This is a bit
        # of a band-aid on the 'stock' item-based algorithm for
        # the moment. The reason is that in this case the estimate
        # is, simply, the user's rating for one item that happened
        # to have a defined similarity. The similarity score doesn't
        # matter, and that seems like a bad situation.
        if total_similarity == 0.0 or \
           not similarities[~np.isnan(similarities)].size:
            return np.nan

        estimated = prefs_sim / total_similarity

        if self.capper:
            # Clamp the estimate to the model's preference range.
            max_p = self.model.maximum_preference_value()
            min_p = self.model.minimum_preference_value()
            estimated = max_p if estimated > max_p else min_p \
                if estimated < min_p else estimated

        return estimated

    def most_similar_users(self, user_id, how_many=None):
        '''
        Return the most similar users to the given user, ordered
        from most similar to least.

        Parameters
        -----------
        user_id:  int or string
            ID of user for which to find most similar other users

        how_many: int
            Desired number of most similar users to find (default=None ALL)
        '''
        old_how_many = self.similarity.num_best
        # +1 since it returns the identity.
        self.similarity.num_best = how_many + 1 \
            if how_many is not None else None
        similarities = self.similarity[user_id]
        self.similarity.num_best = old_how_many
        return np.array([to_user_id for to_user_id, pref in similarities
                         if user_id != to_user_id and not np.isnan(pref)])

    def recommend(self, user_id, how_many=None, **params):
        '''
        Return a list of recommended items, ordered from most strongly
        recommend to least.

        Parameters
        ----------
        user_id: int or string
                 User for which recommendations are to be computed.
        how_many: int
                 Desired number of recommendations (default=None ALL)

        '''
        self.set_params(**params)

        candidate_items = self.all_other_items(user_id, **params)

        recommendable_items = self._top_matches(user_id,
                                                candidate_items, how_many)

        return recommendable_items

    def _top_matches(self, source_id, target_ids, how_many=None, **params):
        '''
        Parameters
        ----------
        target_ids: array of shape [n_target_ids]

        source_id: int or string
                item id to compare against.

        how_many: int
            Desired number of most top items to recommend (default=None ALL)

        Returns
        --------
        Return the top N matches
        It can be user_ids or item_ids.
        '''
        # Empty target_ids
        if target_ids.size == 0:
            return np.array([])

        estimate_preferences = np.vectorize(self.estimate_preference)

        preferences = estimate_preferences(source_id, target_ids)

        # Drop targets whose preference could not be estimated.
        preference_values = preferences[~np.isnan(preferences)]
        target_ids = target_ids[~np.isnan(preferences)]

        sorted_preferences = np.lexsort((preference_values,))[::-1]

        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
            else sorted_preferences

        if self.with_preference:
            # NOTE(review): indexes the unfiltered `preferences` with
            # indices into the filtered arrays; pairs can be misaligned
            # when NaNs were dropped -- confirm intent.
            top_n_recs = [(target_ids[ind],
                           preferences[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [target_ids[ind]
                          for ind in sorted_preferences]

        return top_n_recs

    def recommended_because(self, user_id, item_id, how_many=None, **params):
        '''
        Returns the users that were most influential in recommending a
        given item to a given user. In most implementations, this
        method will return users that prefers the recommended item and that
        are similar to the given user.

        Parameters
        -----------
        user_id : int or string
            ID of the user who was recommended the item

        item_id: int or string
            ID of item that was recommended

        how_many: int
            Maximum number of items to return (default=None ALL)

        Returns
        ----------
        The list of items ordered from most influential in
        recommended the given item to least
        '''
        preferences = self.model.preferences_for_item(item_id)

        if self.model.has_preference_values():
            similarities = \
                np.array([self.similarity.get_similarity(user_id, to_user_id)
                          for to_user_id, pref in preferences
                          if to_user_id != user_id]).flatten()
            prefs = np.array([pref for it, pref in preferences])
            user_ids = np.array([usr for usr, pref in preferences])
        else:
            similarities = \
                np.array([self.similarity.get_similarity(user_id, to_user_id)
                          for to_user_id in preferences
                          if to_user_id != user_id]).flatten()
            prefs = np.array([1.0 for it in preferences])
            user_ids = np.array(preferences)

        # Score each rater by their preference weighted by (1 + sim).
        scores = prefs[~np.isnan(similarities)] * \
            (1.0 + similarities[~np.isnan(similarities)])

        sorted_preferences = np.lexsort((scores,))[::-1]

        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
            else sorted_preferences

        if self.with_preference:
            top_n_recs = [(user_ids[ind],
                           prefs[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [user_ids[ind]
                          for ind in sorted_preferences]

        return top_n_recs
| 37.316901 | 98 | 0.627062 |
from sklearn.base import BaseEstimator
from .base import ItemRecommender, UserRecommender
from .item_strategies import ItemsNeighborhoodStrategy
from .neighborhood_strategies import NearestNeighborsStrategy
import numpy as np
class ItemBasedRecommender(ItemRecommender):
    """Item-based collaborative-filtering recommender.

    Scores candidate items for a user from the similarity between each
    candidate and the items the user has already expressed a preference
    for, weighted by those preference values.
    """

    def __init__(self, model, similarity, items_selection_strategy=None,
            capper=True, with_preference=False):
        # model: data model exposing user/item preferences.
        # similarity: item-item similarity exposing `get_similarity`.
        # items_selection_strategy: how candidate items are picked;
        #   defaults to ItemsNeighborhoodStrategy.
        # capper: clamp estimates into the model's preference range.
        # with_preference: if True, results are (item, score) pairs.
        ItemRecommender.__init__(self, model, with_preference)
        self.similarity = similarity
        self.capper = capper
        if items_selection_strategy is None:
            self.items_selection_strategy = ItemsNeighborhoodStrategy()
        else:
            self.items_selection_strategy = items_selection_strategy

    def recommend(self, user_id, how_many=None, **params):
        """Return up to `how_many` recommended items for `user_id`."""
        self._set_params(**params)
        candidate_items = self.all_other_items(user_id)
        recommendable_items = self._top_matches(user_id, \
                candidate_items, how_many)
        return recommendable_items

    def estimate_preference(self, user_id, item_id, **params):
        """Estimate `user_id`'s preference for `item_id`.

        Returns the stored preference when one exists; otherwise a
        similarity-weighted average of the user's other preferences,
        optionally clamped to the model's [min, max] range.
        """
        preference = self.model.preference_value(user_id, item_id)
        if not np.isnan(preference):
            return preference
        prefs = self.model.preferences_from_user(user_id)
        if not self.model.has_preference_values():
            # Boolean model: treat every expressed preference as 1.0.
            prefs = [(pref, 1.0) for pref in prefs]
        similarities = \
            np.array([self.similarity.get_similarity(item_id, to_item_id) \
                for to_item_id, pref in prefs if to_item_id != item_id]).flatten()
        prefs = np.array([pref for it, pref in prefs])
        # NOTE(review): `similarities` excludes item_id while `prefs` does
        # not; if item_id appears among the user's preferences the two
        # arrays differ in length and the masking below misaligns —
        # confirm upstream guarantees it is absent here.
        prefs_sim = np.sum(prefs[~np.isnan(similarities)] *
                             similarities[~np.isnan(similarities)])
        total_similarity = np.sum(similarities)
        # Bail out when there is no usable similarity mass to average over.
        if total_similarity == 0.0 or \
           not similarities[~np.isnan(similarities)].size:
            return np.nan
        estimated = prefs_sim / total_similarity
        if self.capper:
            # Clamp the estimate into the model's preference range.
            max_p = self.model.maximum_preference_value()
            min_p = self.model.minimum_preference_value()
            estimated = max_p if estimated > max_p else min_p \
                    if estimated < min_p else estimated
        return estimated

    def all_other_items(self, user_id, **params):
        """Return the candidate items for `user_id` chosen by the strategy."""
        return self.items_selection_strategy.candidate_items(user_id, \
                            self.model)

    def _top_matches(self, source_id, target_ids, how_many=None, **params):
        """Rank `target_ids` for `source_id` by estimated preference."""
        if target_ids.size == 0:
            return np.array([])
        # Vectorize the scalar estimator over every candidate id.
        estimate_preferences = np.vectorize(self.estimate_preference)
        preferences = estimate_preferences(source_id, target_ids)
        # Drop candidates whose preference could not be estimated.
        preference_values = preferences[~np.isnan(preferences)]
        target_ids = target_ids[~np.isnan(preferences)]
        # lexsort sorts ascending; reverse for best-first order.
        sorted_preferences = np.lexsort((preference_values,))[::-1]
        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
                else sorted_preferences
        if self.with_preference:
            # NOTE(review): indices come from the NaN-filtered array but
            # index the unfiltered `preferences` — misaligns when any
            # estimate was NaN; confirm.
            top_n_recs = [(target_ids[ind], \
                     preferences[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [target_ids[ind]
                     for ind in sorted_preferences]
        return top_n_recs

    def most_similar_items(self, item_id, how_many=None):
        """Return the items most similar to `item_id` (itself excluded)."""
        old_how_many = self.similarity.num_best
        # +1 because the similarity also returns the query item itself.
        self.similarity.num_best = how_many + 1 \
                    if how_many is not None else None
        similarities = self.similarity[item_id]
        # Restore the caller-visible state mutated above.
        self.similarity.num_best = old_how_many
        return np.array([item for item, pref in similarities \
            if item != item_id and not np.isnan(pref)])

    def recommended_because(self, user_id, item_id, how_many=None, **params):
        """Return the items most influential in recommending `item_id`
        to `user_id`, ordered most influential first."""
        preferences = self.model.preferences_from_user(user_id)
        if self.model.has_preference_values():
            similarities = \
                np.array([self.similarity.get_similarity(item_id, to_item_id) \
                    for to_item_id, pref in preferences
                        if to_item_id != item_id]).flatten()
            prefs = np.array([pref for it, pref in preferences])
            item_ids = np.array([it for it, pref in preferences])
        else:
            # Boolean model: every expressed preference counts as 1.0.
            similarities = \
                np.array([self.similarity.get_similarity(item_id, to_item_id) \
                    for to_item_id in preferences
                        if to_item_id != item_id]).flatten()
            prefs = np.array([1.0 for it in preferences])
            item_ids = np.array(preferences)
        # NOTE(review): `similarities` filters out item_id but `prefs` /
        # `item_ids` do not; lengths diverge when item_id is among the
        # user's preferences — confirm it cannot be.
        # Influence score: preference weighted by (1 + similarity).
        scores = prefs[~np.isnan(similarities)] * \
            (1.0 + similarities[~np.isnan(similarities)])
        sorted_preferences = np.lexsort((scores,))[::-1]
        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
                else sorted_preferences
        if self.with_preference:
            top_n_recs = [(item_ids[ind], \
                     prefs[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [item_ids[ind]
                     for ind in sorted_preferences]
        return top_n_recs
class UserBasedRecommender(UserRecommender):
    """User-based collaborative-filtering recommender.

    Scores candidate items for a user from the preferences of that
    user's nearest neighbors, weighted by user-user similarity.
    """

    def __init__(self, model, similarity, neighborhood_strategy=None,
            capper=True, with_preference=False):
        # model: data model exposing user/item preferences.
        # similarity: user-user similarity exposing `get_similarity`.
        # neighborhood_strategy: how the user neighborhood is built;
        #   defaults to NearestNeighborsStrategy.
        # capper: clamp estimates into the model's preference range.
        # with_preference: if True, results are (item, score) pairs.
        UserRecommender.__init__(self, model, with_preference)
        self.similarity = similarity
        self.capper = capper
        if neighborhood_strategy is None:
            self.neighborhood_strategy = NearestNeighborsStrategy()
        else:
            self.neighborhood_strategy = neighborhood_strategy

    def all_other_items(self, user_id, **params):
        """Return items the user's neighbors rated that `user_id` has not."""
        n_similarity = params.pop('n_similarity', 'user_similarity')
        distance = params.pop('distance', self.similarity.distance)
        nhood_size = params.pop('nhood_size', None)
        nearest_neighbors = self.neighborhood_strategy.user_neighborhood(user_id,
                self.model, n_similarity, distance, nhood_size, **params)
        items_from_user_id = self.model.items_from_user(user_id)
        possible_items = []
        for to_user_id in nearest_neighbors:
            possible_items.extend(self.model.items_from_user(to_user_id))
        possible_items = np.unique(np.array(possible_items).flatten())
        # Exclude items the target user already has preferences for.
        return np.setdiff1d(possible_items, items_from_user_id)

    def estimate_preference(self, user_id, item_id, **params):
        """Estimate `user_id`'s preference for `item_id`.

        Returns the stored preference when one exists; otherwise a
        similarity-weighted average over the user's neighborhood,
        optionally clamped to the model's [min, max] range.
        """
        preference = self.model.preference_value(user_id, item_id)
        if not np.isnan(preference):
            return preference
        n_similarity = params.pop('n_similarity', 'user_similarity')
        distance = params.pop('distance', self.similarity.distance)
        nhood_size = params.pop('nhood_size', None)
        nearest_neighbors = self.neighborhood_strategy.user_neighborhood(user_id,
                self.model, n_similarity, distance, nhood_size, **params)
        preference = 0.0
        total_similarity = 0.0
        similarities = np.array([self.similarity.get_similarity(user_id, to_user_id)
            for to_user_id in nearest_neighbors]).flatten()
        # NOTE(review): neighbors who never rated item_id yield NaN here,
        # which propagates into `prefs_sim` below — confirm intended.
        prefs = np.array([self.model.preference_value(to_user_id, item_id)
                 for to_user_id in nearest_neighbors])
        prefs_sim = np.sum(prefs[~np.isnan(similarities)] *
                             similarities[~np.isnan(similarities)])
        total_similarity = np.sum(similarities)
        # Bail out when there is no usable similarity mass to average over.
        if total_similarity == 0.0 or \
           not similarities[~np.isnan(similarities)].size:
            return np.nan
        estimated = prefs_sim / total_similarity
        if self.capper:
            # Clamp the estimate into the model's preference range.
            max_p = self.model.maximum_preference_value()
            min_p = self.model.minimum_preference_value()
            estimated = max_p if estimated > max_p else min_p \
                    if estimated < min_p else estimated
        return estimated

    def most_similar_users(self, user_id, how_many=None):
        """Return the users most similar to `user_id` (itself excluded)."""
        old_how_many = self.similarity.num_best
        # +1 because the similarity also returns the query user itself.
        self.similarity.num_best = how_many + 1 \
                    if how_many is not None else None
        similarities = self.similarity[user_id]
        # Restore the caller-visible state mutated above.
        self.similarity.num_best = old_how_many
        return np.array([to_user_id for to_user_id, pref in similarities \
            if user_id != to_user_id and not np.isnan(pref)])

    def recommend(self, user_id, how_many=None, **params):
        """Return up to `how_many` recommended items for `user_id`."""
        # NOTE(review): uses `set_params` while ItemBasedRecommender uses
        # `_set_params` — confirm which name the base class provides.
        self.set_params(**params)
        candidate_items = self.all_other_items(user_id, **params)
        recommendable_items = self._top_matches(user_id, \
                 candidate_items, how_many)
        return recommendable_items

    def _top_matches(self, source_id, target_ids, how_many=None, **params):
        """Rank `target_ids` for `source_id` by estimated preference."""
        if target_ids.size == 0:
            return np.array([])
        # Vectorize the scalar estimator over every candidate id.
        estimate_preferences = np.vectorize(self.estimate_preference)
        preferences = estimate_preferences(source_id, target_ids)
        # Drop candidates whose preference could not be estimated.
        preference_values = preferences[~np.isnan(preferences)]
        target_ids = target_ids[~np.isnan(preferences)]
        # lexsort sorts ascending; reverse for best-first order.
        sorted_preferences = np.lexsort((preference_values,))[::-1]
        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
                else sorted_preferences
        if self.with_preference:
            # NOTE(review): indices come from the NaN-filtered array but
            # index the unfiltered `preferences` — misaligns when any
            # estimate was NaN; confirm.
            top_n_recs = [(target_ids[ind], \
                     preferences[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [target_ids[ind]
                     for ind in sorted_preferences]
        return top_n_recs

    def recommended_because(self, user_id, item_id, how_many=None, **params):
        """Return the users most influential in recommending `item_id`
        to `user_id`, ordered most influential first."""
        preferences = self.model.preferences_for_item(item_id)
        if self.model.has_preference_values():
            similarities = \
                np.array([self.similarity.get_similarity(user_id, to_user_id) \
                    for to_user_id, pref in preferences
                        if to_user_id != user_id]).flatten()
            prefs = np.array([pref for it, pref in preferences])
            user_ids = np.array([usr for usr, pref in preferences])
        else:
            # Boolean model: every expressed preference counts as 1.0.
            similarities = \
                np.array([self.similarity.get_similarity(user_id, to_user_id) \
                    for to_user_id in preferences
                        if to_user_id != user_id]).flatten()
            prefs = np.array([1.0 for it in preferences])
            user_ids = np.array(preferences)
        # NOTE(review): `similarities` filters out user_id but `prefs` /
        # `user_ids` do not; lengths diverge when user_id is among the
        # item's raters — confirm it cannot be.
        # Influence score: preference weighted by (1 + similarity).
        scores = prefs[~np.isnan(similarities)] * \
            (1.0 + similarities[~np.isnan(similarities)])
        sorted_preferences = np.lexsort((scores,))[::-1]
        sorted_preferences = sorted_preferences[0:how_many] \
            if how_many and sorted_preferences.size > how_many \
                else sorted_preferences
        if self.with_preference:
            top_n_recs = [(user_ids[ind], \
                     prefs[ind]) for ind in sorted_preferences]
        else:
            top_n_recs = [user_ids[ind]
                     for ind in sorted_preferences]
        return top_n_recs
| true | true |
f72fbfc6053fee1b605915399588d9a35599ebe1 | 13,242 | py | Python | care/utils/tests/test_base.py | agzuniverse/care | 952babf5b394921fcdb4fd4b1405cb571261f322 | [
"MIT"
] | null | null | null | care/utils/tests/test_base.py | agzuniverse/care | 952babf5b394921fcdb4fd4b1405cb571261f322 | [
"MIT"
] | null | null | null | care/utils/tests/test_base.py | agzuniverse/care | 952babf5b394921fcdb4fd4b1405cb571261f322 | [
"MIT"
] | null | null | null | import abc
import datetime
from collections import OrderedDict
from typing import Any, Dict
import dateparser
from django.contrib.gis.geos import Point
from pytz import unicode
from rest_framework import status
from rest_framework.test import APITestCase
from care.facility.models import (
CATEGORY_CHOICES,
DISEASE_CHOICES_MAP,
SYMPTOM_CHOICES,
Disease,
DiseaseStatusEnum,
Facility,
LocalBody,
PatientConsultation,
PatientRegistration,
User,
)
from care.users.models import District, State
from config.tests.helper import EverythingEquals, mock_equal
class TestBase(APITestCase):
    """
    Base class for tests, handles most of the test setup and tools for setting up data
    """

    # Show full diffs on assertion failures.
    maxDiff = None

    @classmethod
    def create_user(cls, district: District, username: str = "user", **kwargs):
        """Create and return a verified Staff user in `district`; `kwargs` override defaults."""
        data = {
            "email": f"{username}@somedomain.com",
            "phone_number": "5554446667",
            "age": 30,
            "gender": 2,
            "verified": True,
            "username": username,
            "password": "bar",
            "district": district,
            "user_type": User.TYPE_VALUE_MAP["Staff"],
        }
        data.update(kwargs)
        return User.objects.create_user(**data)

    @classmethod
    def create_super_user(cls, district: District, username: str = "superuser"):
        """Create a DistrictAdmin user and promote it to Django superuser."""
        user = cls.create_user(district=district, username=username, user_type=User.TYPE_VALUE_MAP["DistrictAdmin"],)
        user.is_superuser = True
        user.save()
        return user

    @classmethod
    def create_district(cls, state: State):
        """Create a district under `state` with a timestamp-unique name."""
        return District.objects.create(state=state, name=f"District{datetime.datetime.now().timestamp()}")

    @classmethod
    def create_state(cls):
        """Create a state with a timestamp-unique name."""
        return State.objects.create(name=f"State{datetime.datetime.now().timestamp()}")

    @classmethod
    def create_facility(cls, district: District, user: User = None, **kwargs):
        """Create and return a facility in `district`; `kwargs` override defaults."""
        user = user or cls.user
        data = {
            "name": "Foo",
            "district": district,
            "facility_type": 1,
            "address": "8/88, 1st Cross, 1st Main, Boo Layout",
            "location": Point(24.452545, 49.878248),
            "oxygen_capacity": 10,
            "phone_number": "9998887776",
            "created_by": user,
        }
        data.update(kwargs)
        f = Facility(**data)
        f.save()
        return f

    @classmethod
    def create_patient(cls, **kwargs):
        """Create a patient (plus its medical-history Disease rows) from `get_patient_data`."""
        patient_data = cls.get_patient_data().copy()
        patient_data.update(kwargs)
        medical_history = patient_data.pop("medical_history", [])
        district_id = patient_data.pop("district", None)
        state_id = patient_data.pop("state", None)
        # Translate API-shaped fields into model-shaped kwargs.
        patient_data.update(
            {
                "district_id": district_id,
                "state_id": state_id,
                "disease_status": getattr(DiseaseStatusEnum, patient_data["disease_status"]).value,
            }
        )
        patient = PatientRegistration.objects.create(**patient_data)
        diseases = [
            Disease.objects.create(patient=patient, disease=DISEASE_CHOICES_MAP[mh["disease"]], details=mh["details"])
            for mh in medical_history
        ]
        patient.medical_history.set(diseases)
        return patient

    @classmethod
    def get_user_data(cls, district: District = None, user_type: str = None):
        """
        Returns the data to be used for API testing
        Returns:
            dict
        Params:
            district: District
            user_type: str(A valid mapping for the integer types mentioned inside the models)
        """
        district = district or cls.district
        user_type = user_type or User.TYPE_VALUE_MAP["Staff"]
        return {
            "user_type": user_type,
            "district": district,
            "state": district.state,
            "phone_number": "8887776665",
            "gender": 2,
            "age": 30,
            "email": "foo@foobar.com",
            "username": "user",
            "password": "bar",
        }

    @classmethod
    def get_facility_data(cls, district):
        """
        Returns the data to be used for API testing
        Returns:
            dict
        Params:
            district: int
                An id for the instance of District object created
            user_type: str
                A valid mapping for the integer types mentioned inside the models
        """
        return {
            "name": "Foo",
            "district": (district or cls.district).id,
            "facility_type": 1,
            # NOTE(review): `.timestamp` is not called (missing parens), so
            # the address embeds the bound-method repr — confirm intended.
            "address": f"Address {datetime.datetime.now().timestamp}",
            "location": {"latitude": 49.878248, "longitude": 24.452545},
            "oxygen_capacity": 10,
            "phone_number": "9998887776",
            "capacity": [],
        }

    @classmethod
    def get_patient_data(cls, district=None, state=None):
        """Return API-shaped patient payload, defaulting to the class district/state."""
        return {
            "name": "Foo",
            "age": 32,
            "date_of_birth": datetime.date(1992, 4, 1),
            "gender": 2,
            "is_medical_worker": True,
            "blood_group": "O+",
            "ongoing_medication": "",
            "date_of_return": datetime.datetime(2020, 4, 1, 15, 30, 00),
            "disease_status": "SUSPECTED",
            "phone_number": "+918888888888",
            "address": "Global citizen",
            "contact_with_confirmed_carrier": True,
            "contact_with_suspected_carrier": True,
            "estimated_contact_date": None,
            "past_travel": False,
            "countries_travelled": "",
            "present_health": "Fine",
            "has_SARI": False,
            "is_active": True,
            "state": (state or cls.state).id,
            "district": (district or cls.district).id,
            "local_body": None,
            "number_of_aged_dependents": 2,
            "number_of_chronic_diseased_dependents": 1,
            "medical_history": [{"disease": "Diabetes", "details": "150 count"}],
            "date_of_receipt_of_information": datetime.datetime(2020, 4, 1, 15, 30, 00),
        }

    @classmethod
    def setUpClass(cls) -> None:
        """Create the shared fixtures (state, district, users, facility, patient) once per class."""
        super(TestBase, cls).setUpClass()
        cls.state = cls.create_state()
        cls.district = cls.create_district(cls.state)
        cls.user_type = User.TYPE_VALUE_MAP["Staff"]
        cls.user = cls.create_user(cls.district)
        cls.super_user = cls.create_super_user(district=cls.district)
        cls.facility = cls.create_facility(cls.district)
        cls.patient = cls.create_patient()
        cls.user_data = cls.get_user_data(cls.district, cls.user_type)
        cls.facility_data = cls.get_facility_data(cls.district)
        cls.patient_data = cls.get_patient_data(cls.district)

    def setUp(self) -> None:
        # Every test starts authenticated as the plain staff user.
        self.client.force_login(self.user)

    @abc.abstractmethod
    def get_base_url(self):
        """
        Should return the base url of the testing viewset
        WITHOUT trailing slash
        eg: return "api/v1/facility"
        :return: str
        """
        raise NotImplementedError()

    def get_url(self, entry_id=None, action=None, *args, **kwargs):
        """Build `<base>/<entry_id>/<action>/` with a trailing slash, omitting None parts."""
        url = self.get_base_url(*args, **kwargs)
        if entry_id is not None:
            url = f"{url}/{entry_id}"
        if action is not None:
            url = f"{url}/{action}"
        return f"{url}/"

    @classmethod
    def clone_object(cls, obj, save=True):
        """Return a fresh copy of `obj` (new pk), persisted unless `save=False`."""
        new_obj = obj._meta.model.objects.get(pk=obj.id)
        new_obj.pk = None
        new_obj.id = None
        if save:
            new_obj.save()
        return new_obj

    @abc.abstractmethod
    def get_list_representation(self, obj) -> dict:
        """
        Returns the dict representation of the obj in list API
        :param obj: Object to be represented
        :return: dict
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def get_detail_representation(self, obj=None) -> dict:
        """
        Returns the dict representation of the obj in detail/retrieve API
        :param obj: Object to be represented
        :param data: data
        :return: dict
        """
        raise NotImplementedError()

    def get_local_body_district_state_representation(self, obj):
        """
        Returns the local body, district and state representation for the obj.
        The obj is expected to have `local_body`, `district` and `state` in it's attributes
        Eg: Facility, Patient, User
        :param obj: Any object which has `local_body`, `district` and `state` in attrs
        :return:
        """
        response = {}
        response.update(self.get_local_body_representation(getattr(obj, "local_body", None)))
        response.update(self.get_district_representation(getattr(obj, "district", None)))
        response.update(self.get_state_representation(getattr(obj, "state", None)))
        return response

    def get_local_body_representation(self, local_body: LocalBody):
        """Return the id + nested-object representation of a local body (None-safe)."""
        if local_body is None:
            return {"local_body": None, "local_body_object": None}
        else:
            return {
                "local_body": local_body.id,
                "local_body_object": {
                    "id": local_body.id,
                    "name": local_body.name,
                    "district": local_body.district.id,
                },
            }

    def get_district_representation(self, district: District):
        """Return the id + nested-object representation of a district (None-safe)."""
        if district is None:
            return {"district": None, "district_object": None}
        return {
            "district": district.id,
            "district_object": {"id": district.id, "name": district.name, "state": district.state.id,},
        }

    def get_state_representation(self, state: State):
        """Return the id + nested-object representation of a state (None-safe)."""
        if state is None:
            return {"state": None, "state_object": None}
        return {"state": state.id, "state_object": {"id": state.id, "name": state.name}}

    def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any], msg: Any = ...) -> None:
        """Compare dicts after normalizing nested types and date strings (see `_convert_to_matchable_types`)."""
        first_dict = self._convert_to_matchable_types(first.copy())
        second_dict = self._convert_to_matchable_types(second.copy())
        return super(TestBase, self).assertDictEqual(first_dict, second_dict, msg)

    def _convert_to_matchable_types(self, d):
        """Recursively normalize a dict: OrderedDicts to dicts, and any key containing
        'date' parsed to a UTC-aware datetime so string/datetime values compare equal."""
        def dict_to_matching_type(d: dict):
            return {k: to_matching_type(k, v) for k, v in d.items()}

        def to_matching_type(name: str, value):
            if isinstance(value, (OrderedDict, dict)):
                return dict_to_matching_type(dict(value))
            elif isinstance(value, list):
                return [to_matching_type("", v) for v in value]
            elif "date" in name and not isinstance(value, (type(None), EverythingEquals)):
                return_value = value
                if isinstance(value, (str, unicode,)):
                    return_value = dateparser.parse(value)
                return (
                    return_value.astimezone(tz=datetime.timezone.utc)
                    if isinstance(return_value, datetime.datetime)
                    else return_value
                )
            return value

        return dict_to_matching_type(d)

    def execute_list(self, user=None):
        """GET the list endpoint as `user` (default: class user) and assert HTTP 200."""
        user = user or self.user
        self.client.force_authenticate(user)
        response = self.client.get(self.get_url(), format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        return response

    def get_facility_representation(self, facility):
        """Return the API representation of a facility (None passes through)."""
        if facility is None:
            return facility
        else:
            return {
                "id": facility.id,
                "name": facility.name,
                "facility_type": {"id": facility.facility_type, "name": facility.get_facility_type_display()},
                **self.get_local_body_district_state_representation(facility),
            }

    @classmethod
    def get_consultation_data(cls):
        """Return a consultation payload wired to the class patient and facility."""
        return {
            "patient": cls.patient,
            "facility": cls.facility,
            "symptoms": [SYMPTOM_CHOICES[0][0], SYMPTOM_CHOICES[1][0]],
            "other_symptoms": "No other symptoms",
            "symptoms_onset_date": datetime.datetime(2020, 4, 7, 15, 30),
            "category": CATEGORY_CHOICES[0][0],
            "examination_details": "examination_details",
            "existing_medication": "existing_medication",
            "prescribed_medication": "prescribed_medication",
            "suggestion": PatientConsultation.SUGGESTION_CHOICES[0][0],
            "referred_to": None,
            "admitted": False,
            "admitted_to": None,
            "admission_date": None,
            "discharge_date": None,
            "created_date": mock_equal,
            "modified_date": mock_equal,
        }

    @classmethod
    def create_consultation(cls, patient=None, facility=None, **kwargs) -> PatientConsultation:
        """Create and return a consultation; `kwargs` override `get_consultation_data` defaults."""
        data = cls.get_consultation_data()
        kwargs.update({"patient": patient or cls.patient, "facility": facility or cls.facility})
        data.update(kwargs)
        return PatientConsultation.objects.create(**data)
| 35.596774 | 118 | 0.589337 | import abc
import datetime
from collections import OrderedDict
from typing import Any, Dict
import dateparser
from django.contrib.gis.geos import Point
from pytz import unicode
from rest_framework import status
from rest_framework.test import APITestCase
from care.facility.models import (
CATEGORY_CHOICES,
DISEASE_CHOICES_MAP,
SYMPTOM_CHOICES,
Disease,
DiseaseStatusEnum,
Facility,
LocalBody,
PatientConsultation,
PatientRegistration,
User,
)
from care.users.models import District, State
from config.tests.helper import EverythingEquals, mock_equal
class TestBase(APITestCase):
    """Shared base class for API tests: builds common fixtures and
    provides helpers for constructing data and API representations."""

    # Show full diffs on assertion failures.
    maxDiff = None

    @classmethod
    def create_user(cls, district: District, username: str = "user", **kwargs):
        """Create a verified Staff user in `district`; `kwargs` override defaults."""
        data = {
            "email": f"{username}@somedomain.com",
            "phone_number": "5554446667",
            "age": 30,
            "gender": 2,
            "verified": True,
            "username": username,
            "password": "bar",
            "district": district,
            "user_type": User.TYPE_VALUE_MAP["Staff"],
        }
        data.update(kwargs)
        return User.objects.create_user(**data)

    @classmethod
    def create_super_user(cls, district: District, username: str = "superuser"):
        """Create a DistrictAdmin user and mark it as a Django superuser."""
        user = cls.create_user(district=district, username=username, user_type=User.TYPE_VALUE_MAP["DistrictAdmin"],)
        user.is_superuser = True
        user.save()
        return user

    @classmethod
    def create_district(cls, state: State):
        """Create a district under `state` with a timestamp-unique name."""
        return District.objects.create(state=state, name=f"District{datetime.datetime.now().timestamp()}")

    @classmethod
    def create_state(cls):
        """Create a state with a timestamp-unique name."""
        return State.objects.create(name=f"State{datetime.datetime.now().timestamp()}")

    @classmethod
    def create_facility(cls, district: District, user: User = None, **kwargs):
        """Create a facility in `district`; `kwargs` override defaults."""
        user = user or cls.user
        data = {
            "name": "Foo",
            "district": district,
            "facility_type": 1,
            "address": "8/88, 1st Cross, 1st Main, Boo Layout",
            "location": Point(24.452545, 49.878248),
            "oxygen_capacity": 10,
            "phone_number": "9998887776",
            "created_by": user,
        }
        data.update(kwargs)
        f = Facility(**data)
        f.save()
        return f

    @classmethod
    def create_patient(cls, **kwargs):
        """Create a patient plus its medical-history Disease rows."""
        patient_data = cls.get_patient_data().copy()
        patient_data.update(kwargs)
        medical_history = patient_data.pop("medical_history", [])
        district_id = patient_data.pop("district", None)
        state_id = patient_data.pop("state", None)
        # Translate API-shaped fields into model-shaped kwargs.
        patient_data.update(
            {
                "district_id": district_id,
                "state_id": state_id,
                "disease_status": getattr(DiseaseStatusEnum, patient_data["disease_status"]).value,
            }
        )
        patient = PatientRegistration.objects.create(**patient_data)
        diseases = [
            Disease.objects.create(patient=patient, disease=DISEASE_CHOICES_MAP[mh["disease"]], details=mh["details"])
            for mh in medical_history
        ]
        patient.medical_history.set(diseases)
        return patient

    @classmethod
    def get_user_data(cls, district: District = None, user_type: str = None):
        """Return a user payload for API testing, defaulting district/user_type to the class fixtures."""
        district = district or cls.district
        user_type = user_type or User.TYPE_VALUE_MAP["Staff"]
        return {
            "user_type": user_type,
            "district": district,
            "state": district.state,
            "phone_number": "8887776665",
            "gender": 2,
            "age": 30,
            "email": "foo@foobar.com",
            "username": "user",
            "password": "bar",
        }

    @classmethod
    def get_facility_data(cls, district):
        """Return a facility payload for API testing."""
        return {
            "name": "Foo",
            "district": (district or cls.district).id,
            "facility_type": 1,
            # NOTE(review): `.timestamp` is not called (missing parens), so
            # the address embeds the bound-method repr — confirm intended.
            "address": f"Address {datetime.datetime.now().timestamp}",
            "location": {"latitude": 49.878248, "longitude": 24.452545},
            "oxygen_capacity": 10,
            "phone_number": "9998887776",
            "capacity": [],
        }

    @classmethod
    def get_patient_data(cls, district=None, state=None):
        """Return an API-shaped patient payload, defaulting to the class district/state."""
        return {
            "name": "Foo",
            "age": 32,
            "date_of_birth": datetime.date(1992, 4, 1),
            "gender": 2,
            "is_medical_worker": True,
            "blood_group": "O+",
            "ongoing_medication": "",
            "date_of_return": datetime.datetime(2020, 4, 1, 15, 30, 00),
            "disease_status": "SUSPECTED",
            "phone_number": "+918888888888",
            "address": "Global citizen",
            "contact_with_confirmed_carrier": True,
            "contact_with_suspected_carrier": True,
            "estimated_contact_date": None,
            "past_travel": False,
            "countries_travelled": "",
            "present_health": "Fine",
            "has_SARI": False,
            "is_active": True,
            "state": (state or cls.state).id,
            "district": (district or cls.district).id,
            "local_body": None,
            "number_of_aged_dependents": 2,
            "number_of_chronic_diseased_dependents": 1,
            "medical_history": [{"disease": "Diabetes", "details": "150 count"}],
            "date_of_receipt_of_information": datetime.datetime(2020, 4, 1, 15, 30, 00),
        }

    @classmethod
    def setUpClass(cls) -> None:
        """Create the shared fixtures (state, district, users, facility, patient) once per class."""
        super(TestBase, cls).setUpClass()
        cls.state = cls.create_state()
        cls.district = cls.create_district(cls.state)
        cls.user_type = User.TYPE_VALUE_MAP["Staff"]
        cls.user = cls.create_user(cls.district)
        cls.super_user = cls.create_super_user(district=cls.district)
        cls.facility = cls.create_facility(cls.district)
        cls.patient = cls.create_patient()
        cls.user_data = cls.get_user_data(cls.district, cls.user_type)
        cls.facility_data = cls.get_facility_data(cls.district)
        cls.patient_data = cls.get_patient_data(cls.district)

    def setUp(self) -> None:
        # Every test starts authenticated as the plain staff user.
        self.client.force_login(self.user)

    @abc.abstractmethod
    def get_base_url(self):
        """Return the base URL of the viewset under test, WITHOUT a trailing slash."""
        raise NotImplementedError()

    def get_url(self, entry_id=None, action=None, *args, **kwargs):
        """Build `<base>/<entry_id>/<action>/` with a trailing slash, omitting None parts."""
        url = self.get_base_url(*args, **kwargs)
        if entry_id is not None:
            url = f"{url}/{entry_id}"
        if action is not None:
            url = f"{url}/{action}"
        return f"{url}/"

    @classmethod
    def clone_object(cls, obj, save=True):
        """Return a fresh copy of `obj` (new pk), persisted unless `save=False`."""
        new_obj = obj._meta.model.objects.get(pk=obj.id)
        new_obj.pk = None
        new_obj.id = None
        if save:
            new_obj.save()
        return new_obj

    @abc.abstractmethod
    def get_list_representation(self, obj) -> dict:
        """Return the dict representation of `obj` as produced by the list API."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_detail_representation(self, obj=None) -> dict:
        """Return the dict representation of `obj` as produced by the detail/retrieve API."""
        raise NotImplementedError()

    def get_local_body_district_state_representation(self, obj):
        """Return merged local-body/district/state representations for `obj`
        (which is expected to expose those attributes, e.g. Facility/Patient/User)."""
        response = {}
        response.update(self.get_local_body_representation(getattr(obj, "local_body", None)))
        response.update(self.get_district_representation(getattr(obj, "district", None)))
        response.update(self.get_state_representation(getattr(obj, "state", None)))
        return response

    def get_local_body_representation(self, local_body: LocalBody):
        """Return the id + nested-object representation of a local body (None-safe)."""
        if local_body is None:
            return {"local_body": None, "local_body_object": None}
        else:
            return {
                "local_body": local_body.id,
                "local_body_object": {
                    "id": local_body.id,
                    "name": local_body.name,
                    "district": local_body.district.id,
                },
            }

    def get_district_representation(self, district: District):
        """Return the id + nested-object representation of a district (None-safe)."""
        if district is None:
            return {"district": None, "district_object": None}
        return {
            "district": district.id,
            "district_object": {"id": district.id, "name": district.name, "state": district.state.id,},
        }

    def get_state_representation(self, state: State):
        """Return the id + nested-object representation of a state (None-safe)."""
        if state is None:
            return {"state": None, "state_object": None}
        return {"state": state.id, "state_object": {"id": state.id, "name": state.name}}

    def assertDictEqual(self, first: Dict[Any, Any], second: Dict[Any, Any], msg: Any = ...) -> None:
        """Compare dicts after normalizing nested types and date strings."""
        first_dict = self._convert_to_matchable_types(first.copy())
        second_dict = self._convert_to_matchable_types(second.copy())
        return super(TestBase, self).assertDictEqual(first_dict, second_dict, msg)

    def _convert_to_matchable_types(self, d):
        """Recursively normalize a dict: OrderedDicts to dicts, and any key containing
        'date' parsed to a UTC-aware datetime so string/datetime values compare equal."""
        def dict_to_matching_type(d: dict):
            return {k: to_matching_type(k, v) for k, v in d.items()}

        def to_matching_type(name: str, value):
            if isinstance(value, (OrderedDict, dict)):
                return dict_to_matching_type(dict(value))
            elif isinstance(value, list):
                return [to_matching_type("", v) for v in value]
            elif "date" in name and not isinstance(value, (type(None), EverythingEquals)):
                return_value = value
                if isinstance(value, (str, unicode,)):
                    return_value = dateparser.parse(value)
                return (
                    return_value.astimezone(tz=datetime.timezone.utc)
                    if isinstance(return_value, datetime.datetime)
                    else return_value
                )
            return value

        return dict_to_matching_type(d)

    def execute_list(self, user=None):
        """GET the list endpoint as `user` (default: class user) and assert HTTP 200."""
        user = user or self.user
        self.client.force_authenticate(user)
        response = self.client.get(self.get_url(), format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        return response

    def get_facility_representation(self, facility):
        """Return the API representation of a facility (None passes through)."""
        if facility is None:
            return facility
        else:
            return {
                "id": facility.id,
                "name": facility.name,
                "facility_type": {"id": facility.facility_type, "name": facility.get_facility_type_display()},
                **self.get_local_body_district_state_representation(facility),
            }

    @classmethod
    def get_consultation_data(cls):
        """Return a consultation payload wired to the class patient and facility."""
        return {
            "patient": cls.patient,
            "facility": cls.facility,
            "symptoms": [SYMPTOM_CHOICES[0][0], SYMPTOM_CHOICES[1][0]],
            "other_symptoms": "No other symptoms",
            "symptoms_onset_date": datetime.datetime(2020, 4, 7, 15, 30),
            "category": CATEGORY_CHOICES[0][0],
            "examination_details": "examination_details",
            "existing_medication": "existing_medication",
            "prescribed_medication": "prescribed_medication",
            "suggestion": PatientConsultation.SUGGESTION_CHOICES[0][0],
            "referred_to": None,
            "admitted": False,
            "admitted_to": None,
            "admission_date": None,
            "discharge_date": None,
            "created_date": mock_equal,
            "modified_date": mock_equal,
        }

    @classmethod
    def create_consultation(cls, patient=None, facility=None, **kwargs) -> PatientConsultation:
        """Create and return a consultation; `kwargs` override `get_consultation_data` defaults."""
        data = cls.get_consultation_data()
        kwargs.update({"patient": patient or cls.patient, "facility": facility or cls.facility})
        data.update(kwargs)
        return PatientConsultation.objects.create(**data)
| true | true |
f72fc0334115b183ce538c3c6dd415915cddc916 | 1,223 | py | Python | data_statistics.py | Dipeshtamboli/domain-shift | 3f29577df6ab7269ad69a5fc651b63ed78708f0b | [
"MIT"
] | null | null | null | data_statistics.py | Dipeshtamboli/domain-shift | 3f29577df6ab7269ad69a5fc651b63ed78708f0b | [
"MIT"
] | null | null | null | data_statistics.py | Dipeshtamboli/domain-shift | 3f29577df6ab7269ad69a5fc651b63ed78708f0b | [
"MIT"
] | null | null | null | import pdb
import numpy as np
import os
import glob
import torch
import torch.nn as nn
import torchvision.models as models
import torchvision.transforms as transforms
from torch.autograd import Variable
from PIL import Image
from tqdm import tqdm
# Root of the pre-extracted ResNet feature files, relative to this script.
relative_path = 'datasets/resnet_features_subset_office31/'
# relative_path = 'datasets/office-31_10_class_subset/'
# Collect every .npy feature file under the dataset directory
# (layout: .../<domain>/<class_name>/<file>.npy).
all_npys = glob.glob(os.path.dirname(os.path.realpath(__file__))+'/'+relative_path+"**/*.npy" , recursive=True)
num_plot_classes = 31
# 3 domains x 5 samples per class, 1000-d feature vectors each.
all_features = np.zeros((num_plot_classes*3*5,1000))
all_feat = {
    "amazon": np.zeros((num_plot_classes*5,1000)),
    "dslr": np.zeros((num_plot_classes*5,1000)),
    "webcam": np.zeros((num_plot_classes*5,1000)),
}
domain_names =[]
class_names = []
counter = 0
for i, npy_loc in enumerate(all_npys):
    # NOTE(review): unique_labels/unique_counts are recomputed every
    # iteration but never used — presumably leftover debugging; confirm.
    unique_labels, unique_counts = np.unique(class_names, return_counts=True)
    # Path layout: .../<domain>/<class_name>/<file>.npy
    domain = npy_loc.split('/')[-3]
    class_name = npy_loc.split('/')[-2]
    # Keep the sample while still discovering new classes, or when its
    # class has already been admitted to the subset.
    if len(np.unique(class_names)) < num_plot_classes or class_name in class_names:
        all_features[counter] = np.load(npy_loc)
        counter += 1
        domain_names.append(domain)
class_names.append(class_name) | 33.054054 | 112 | 0.713001 | import pdb
import numpy as np
import os
import glob
import torch
import torch.nn as nn
import torchvision.models as models
import torchvision.transforms as transforms
from torch.autograd import Variable
from PIL import Image
from tqdm import tqdm
# Root of the pre-extracted ResNet feature files, relative to this script.
relative_path = 'datasets/resnet_features_subset_office31/'
# Collect every .npy feature file under the dataset directory
# (layout: .../<domain>/<class_name>/<file>.npy).
all_npys = glob.glob(os.path.dirname(os.path.realpath(__file__)) + '/' + relative_path + "**/*.npy", recursive=True)
num_plot_classes = 31
# 3 domains x 5 samples per class, 1000-d feature vectors each.
all_features = np.zeros((num_plot_classes * 3 * 5, 1000))
all_feat = {
    "amazon": np.zeros((num_plot_classes * 5, 1000)),
    "dslr": np.zeros((num_plot_classes * 5, 1000)),
    "webcam": np.zeros((num_plot_classes * 5, 1000)),
}
domain_names = []
class_names = []
counter = 0
# Removed the dead per-iteration `np.unique(class_names, return_counts=True)`
# computation whose results were never read, and the unused enumerate index.
for npy_loc in all_npys:
    domain = npy_loc.split('/')[-3]
    class_name = npy_loc.split('/')[-2]
    # Keep the sample while still discovering new classes, or when its
    # class has already been admitted to the subset.
    if len(np.unique(class_names)) < num_plot_classes or class_name in class_names:
        all_features[counter] = np.load(npy_loc)
        counter += 1
        domain_names.append(domain)
        class_names.append(class_name)
f72fc06d644f387753e387544faebf08963a1082 | 16,871 | py | Python | tensorflow_probability/python/distributions/poisson_lognormal.py | hephaex/probability | 740d0db0bf2b1e1a04cfd0b55481c44380b3cb05 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/distributions/poisson_lognormal.py | hephaex/probability | 740d0db0bf2b1e1a04cfd0b55481c44380b3cb05 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/distributions/poisson_lognormal.py | hephaex/probability | 740d0db0bf2b1e1a04cfd0b55481c44380b3cb05 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The PoissonLogNormalQuadratureCompound distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow as tf
from tensorflow_probability.python.bijectors import exp as exp_bijector
from tensorflow_probability.python.distributions import categorical
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import normal
from tensorflow_probability.python.distributions import poisson
from tensorflow_probability.python.distributions import seed_stream
from tensorflow_probability.python.distributions import transformed_distribution
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
__all__ = [
"PoissonLogNormalQuadratureCompound",
"quadrature_scheme_lognormal_gauss_hermite",
"quadrature_scheme_lognormal_quantiles",
]
def quadrature_scheme_lognormal_gauss_hermite(
    loc, scale, quadrature_size,
    validate_args=False, name=None):  # pylint: disable=unused-argument
  """Build a Gauss-Hermite quadrature over the positive reals.

  The Hermite nodes/weights are mapped through the affine change of
  variables `log_rate = loc + sqrt(2) * scale * node`, yielding
  `quadrature_size` log-rates per (batch of) `loc`, `scale`.

  Note: for a given `quadrature_size`, this method is generally less accurate
  than `quadrature_scheme_lognormal_quantiles`.

  Args:
    loc: `float`-like (batch of) scalar `Tensor`; LogNormal location.
    scale: `float`-like (batch of) scalar `Tensor`; LogNormal scale.
    quadrature_size: Python `int`; number of quadrature points.
    validate_args: Python `bool`; unused here, kept for interface parity.
    name: Python `str` name prefixed to Ops created by this function.

  Returns:
    grid: (Batch of) length-`quadrature_size` vectors of Poisson `log_rate`s.
    probs: Length-`quadrature_size` vector of quadrature weights (sum to 1).
  """
  with tf.name_scope(name, "vector_diffeomixture_quadrature_gauss_hermite",
                     [loc, scale]):
    nodes, weights = np.polynomial.hermite.hermgauss(deg=quadrature_size)
    numpy_dtype = loc.dtype.as_numpy_dtype
    nodes = nodes.astype(numpy_dtype)
    weights = weights.astype(numpy_dtype)
    # L1-normalize so the weights form a probability vector.
    weights /= np.linalg.norm(weights, ord=1, keepdims=True)
    probs = tf.convert_to_tensor(value=weights, name="probs", dtype=loc.dtype)
    # Broadcast loc/scale against each node: every (possibly batched)
    # loc/scale pair yields `quadrature_size` log-rates.
    grid = loc[..., tf.newaxis] + np.sqrt(2.) * scale[..., tf.newaxis] * nodes
    return grid, probs
def quadrature_scheme_lognormal_quantiles(
    loc, scale, quadrature_size,
    validate_args=False, name=None):
  """Use LogNormal quantiles to form quadrature on positive-reals.
  Args:
    loc: `float`-like (batch of) scalar `Tensor`; the location parameter of
      the LogNormal prior.
    scale: `float`-like (batch of) scalar `Tensor`; the scale parameter of
      the LogNormal prior.
    quadrature_size: Python `int` scalar representing the number of quadrature
      points.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
    name: Python `str` name prefixed to Ops created by this function.
  Returns:
    grid: (Batch of) length-`quadrature_size` vectors representing the
      `log_rate` parameters of a `Poisson`.
    probs: (Batch of) length-`quadrature_size` vectors representing the
      weight associated with each `grid` value.
  """
  with tf.name_scope(name, "quadrature_scheme_lognormal_quantiles",
                     [loc, scale]):
    # Create a LogNormal distribution as Exp(Normal(loc, scale)).
    dist = transformed_distribution.TransformedDistribution(
        distribution=normal.Normal(loc=loc, scale=scale),
        bijector=exp_bijector.Exp(),
        validate_args=validate_args)
    # Batch rank may be statically unknown; fall back to a graph-time value.
    batch_ndims = dist.batch_shape.ndims
    if batch_ndims is None:
      batch_ndims = tf.shape(input=dist.batch_shape_tensor())[0]
    def _compute_quantiles():
      """Helper to build quantiles."""
      # Omit {0, 1} since they might lead to Inf/NaN.
      # linspace produces quadrature_size + 3 points on [0, 1]; trimming the
      # endpoints leaves quadrature_size + 1 interior edges, whose midpoints
      # (taken below) give exactly quadrature_size grid points.
      zero = tf.zeros([], dtype=dist.dtype)
      edges = tf.linspace(zero, 1., quadrature_size + 3)[1:-1]
      # Expand edges so its broadcast across batch dims.
      edges = tf.reshape(
          edges,
          shape=tf.concat(
              [[-1], tf.ones([batch_ndims], dtype=tf.int32)], axis=0))
      quantiles = dist.quantile(edges)
      # Cyclically permute left by one, i.e., move the edges axis from the
      # front to the rightmost position.
      perm = tf.concat([tf.range(1, 1 + batch_ndims), [0]], axis=0)
      quantiles = tf.transpose(a=quantiles, perm=perm)
      return quantiles
    quantiles = _compute_quantiles()
    # Compute grid as quantile midpoints.
    grid = (quantiles[..., :-1] + quantiles[..., 1:]) / 2.
    # Set shape hints.
    grid.set_shape(dist.batch_shape.concatenate([quadrature_size]))
    # By construction probs is constant, i.e., `1 / quadrature_size`. This is
    # important, because non-constant probs leads to non-reparameterizable
    # samples.
    probs = tf.fill(
        dims=[quadrature_size], value=1. / tf.cast(quadrature_size, dist.dtype))
    return grid, probs
class PoissonLogNormalQuadratureCompound(distribution.Distribution):
  """`PoissonLogNormalQuadratureCompound` distribution.
  The `PoissonLogNormalQuadratureCompound` is an approximation to a
  Poisson-LogNormal [compound distribution](
  https://en.wikipedia.org/wiki/Compound_probability_distribution), i.e.,
  ```none
  p(k|loc, scale)
  = int_{R_+} dl LogNormal(l | loc, scale) Poisson(k | l)
  approx= sum{ prob[d] Poisson(k | lambda(grid[d])) : d=0, ..., deg-1 }
  ```
  By default, the `grid` is chosen as quantiles of the `LogNormal` distribution
  parameterized by `loc`, `scale` and the `prob` vector is
  `[1. / quadrature_size]*quadrature_size`.
  In the non-approximation case, a draw from the LogNormal prior represents the
  Poisson rate parameter. Unfortunately, the non-approximate distribution lacks
  an analytical probability density function (pdf). Therefore the
  `PoissonLogNormalQuadratureCompound` class implements an approximation based
  on [quadrature](https://en.wikipedia.org/wiki/Numerical_integration).
  Note: although the `PoissonLogNormalQuadratureCompound` is approximately the
  Poisson-LogNormal compound distribution, it is itself a valid distribution.
  Viz., it possesses a `sample`, `log_prob`, `mean`, `variance`, etc. which are
  all mutually consistent.
  #### Mathematical Details
  The `PoissonLogNormalQuadratureCompound` approximates a Poisson-LogNormal
  [compound distribution](
  https://en.wikipedia.org/wiki/Compound_probability_distribution). Using
  variable-substitution and [numerical quadrature](
  https://en.wikipedia.org/wiki/Numerical_integration) (default:
  based on `LogNormal` quantiles) we can redefine the distribution to be a
  parameter-less convex combination of `deg` different Poisson samples.
  That is, defined over positive integers, this distribution is parameterized
  by a (batch of) `loc` and `scale` scalars.
  The probability density function (pdf) is,
  ```none
  pdf(k | loc, scale, deg)
  = sum{ prob[d] Poisson(k | lambda=exp(grid[d]))
        : d=0, ..., deg-1 }
  ```
  #### Examples
  ```python
  tfd = tfp.distributions
  # Create two batches of PoissonLogNormalQuadratureCompounds, one with
  # prior `loc = 0.` and another with `loc = -0.5`. In both cases `scale = 1.`
  pln = tfd.PoissonLogNormalQuadratureCompound(
      loc=[0., -0.5],
      scale=1.,
      quadrature_size=10,
      validate_args=True)
  ```
  """
  def __init__(self,
               loc,
               scale,
               quadrature_size=8,
               quadrature_fn=quadrature_scheme_lognormal_quantiles,
               validate_args=False,
               allow_nan_stats=True,
               name="PoissonLogNormalQuadratureCompound"):
    """Constructs the `PoissonLogNormalQuadratureCompound`.
    Note: `probs` returned by (optional) `quadrature_fn` are presumed to be
    either a length-`quadrature_size` vector or a batch of vectors in 1-to-1
    correspondence with the returned `grid`. (I.e., broadcasting is only
    partially supported.)
    Args:
      loc: `float`-like (batch of) scalar `Tensor`; the location parameter of
        the LogNormal prior.
      scale: `float`-like (batch of) scalar `Tensor`; the scale parameter of
        the LogNormal prior.
      quadrature_size: Python `int` scalar representing the number of
        quadrature points.
      quadrature_fn: Python callable taking `loc`, `scale`,
        `quadrature_size`, `validate_args` and returning `tuple(grid, probs)`
        representing the LogNormal grid and corresponding normalized weight.
        Default value: `quadrature_scheme_lognormal_quantiles`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        statistics (e.g., mean, mode, variance) use the value "`NaN`" to
        indicate the result is undefined. When `False`, an exception is raised
        if one or more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    Raises:
      TypeError: if `quadrature_grid` and `quadrature_probs` have different base
        `dtype`.
    """
    parameters = dict(locals())
    with tf.name_scope(name, values=[loc, scale]) as name:
      dtype = dtype_util.common_dtype([loc, scale], tf.float32)
      if loc is not None:
        loc = tf.convert_to_tensor(value=loc, name="loc", dtype=dtype)
      if scale is not None:
        scale = tf.convert_to_tensor(value=scale, dtype=dtype, name="scale")
      self._quadrature_grid, self._quadrature_probs = tuple(quadrature_fn(
          loc, scale, quadrature_size, validate_args))
      # Grid and probs must share a base dtype since both feed the same
      # component/mixture machinery below.
      dt = self._quadrature_grid.dtype
      if dt.base_dtype != self._quadrature_probs.dtype.base_dtype:
        raise TypeError("Quadrature grid dtype ({}) does not match quadrature "
                        "probs dtype ({}).".format(
                            dt.name, self._quadrature_probs.dtype.name))
      # One Poisson component per quadrature point; grid entries are log-rates.
      self._distribution = poisson.Poisson(
          log_rate=self._quadrature_grid,
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats)
      # Categorical over quadrature points, weighted by the quadrature probs.
      self._mixture_distribution = categorical.Categorical(
          logits=tf.math.log(self._quadrature_probs),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats)
      self._loc = loc
      self._scale = scale
      self._quadrature_size = quadrature_size
      super(PoissonLogNormalQuadratureCompound, self).__init__(
          dtype=dt,
          reparameterization_type=reparameterization.NOT_REPARAMETERIZED,
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          graph_parents=[loc, scale],
          name=name)
  @property
  def mixture_distribution(self):
    """Distribution which randomly selects a Poisson with quadrature param."""
    return self._mixture_distribution
  @property
  def distribution(self):
    """Base Poisson parameterized by a quadrature grid."""
    return self._distribution
  @property
  def loc(self):
    """Location parameter of the LogNormal prior."""
    return self._loc
  @property
  def scale(self):
    """Scale parameter of the LogNormal prior."""
    return self._scale
  @property
  def quadrature_size(self):
    """Python `int` number of quadrature points."""
    return self._quadrature_size
  def _batch_shape_tensor(self):
    # Rightmost axis of the mixture logits indexes quadrature points, not
    # batch members, hence the trailing [:-1].
    return tf.broadcast_dynamic_shape(
        self.distribution.batch_shape_tensor(),
        tf.shape(input=self.mixture_distribution.logits))[:-1]
  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.distribution.batch_shape,
        self.mixture_distribution.logits.shape)[:-1]
  def _event_shape(self):
    # Scalar event: each sample is a single count.
    return tf.TensorShape([])
  def _sample_n(self, n, seed=None):
    # Get ids as a [n, batch_size]-shaped matrix, unless batch_shape=[] then get
    # ids as a [n]-shaped vector.
    batch_size = self.batch_shape.num_elements()
    if batch_size is None:
      batch_size = tf.reduce_prod(input_tensor=self.batch_shape_tensor())
    # We need to "sample extra" from the mixture distribution if it doesn't
    # already specify a probs vector for each batch coordinate.
    # We only support this kind of reduced broadcasting, i.e., there is exactly
    # one probs vector for all batch dims or one for each.
    stream = seed_stream.SeedStream(
        seed, salt="PoissonLogNormalQuadratureCompound")
    ids = self._mixture_distribution.sample(
        sample_shape=concat_vectors(
            [n],
            distribution_util.pick_vector(
                self.mixture_distribution.is_scalar_batch(),
                [batch_size],
                np.int32([]))),
        seed=stream())
    # We need to flatten batch dims in case mixture_distribution has its own
    # batch dims.
    ids = tf.reshape(
        ids,
        shape=concat_vectors([n],
                             distribution_util.pick_vector(
                                 self.is_scalar_batch(), np.int32([]),
                                 np.int32([-1]))))
    # Stride `quadrature_size` for `batch_size` number of times so each id
    # indexes into the flattened [batch_size, quadrature_size] grid.
    offset = tf.range(
        start=0,
        limit=batch_size * self._quadrature_size,
        delta=self._quadrature_size,
        dtype=ids.dtype)
    ids += offset
    rate = tf.gather(tf.reshape(self.distribution.rate, shape=[-1]), ids)
    rate = tf.reshape(
        rate, shape=concat_vectors([n], self.batch_shape_tensor()))
    # NOTE(review): the final Poisson draw reuses the raw `seed` rather than
    # `stream()`; confirm this is intentional and does not correlate with the
    # mixture-id draw above.
    return tf.random.poisson(lam=rate, shape=[], dtype=self.dtype, seed=seed)
  def _log_prob(self, x):
    # log sum_d exp(log prob[d] + log Poisson(x | grid[d])), i.e. a
    # numerically-stable mixture density over the quadrature components.
    return tf.reduce_logsumexp(
        input_tensor=(self.mixture_distribution.logits +
                      self.distribution.log_prob(x[..., tf.newaxis])),
        axis=-1)
  def _mean(self):
    # Mixture mean: sum_d prob[d] * rate[d], computed in log-space since the
    # Poisson mean equals its rate.
    return tf.exp(
        tf.reduce_logsumexp(
            input_tensor=self.mixture_distribution.logits +
            self.distribution.log_rate,
            axis=-1))
  def _variance(self):
    return tf.exp(self._log_variance())
  def _stddev(self):
    return tf.exp(0.5 * self._log_variance())
  def _log_variance(self):
    # Following calculation is based on law of total variance:
    #
    # Var[Z] = E[Var[Z | V]] + Var[E[Z | V]]
    #
    # where,
    #
    # Z|v ~ interpolate_affine[v](distribution)
    # V ~ mixture_distribution
    #
    # thus,
    #
    # E[Var[Z | V]]  = sum{ prob[d] Var[d] : d=0, ..., deg-1 }
    # Var[E[Z | V]] = sum{ prob[d] (Mean[d] - Mean)**2 : d=0, ..., deg-1 }
    v = tf.stack(
        [
            # log(self.distribution.variance()) = log(Var[d]) = log(rate[d])
            self.distribution.log_rate,
            # log((Mean[d] - Mean)**2)
            2. * tf.math.log(
                tf.abs(self.distribution.mean() -
                       self._mean()[..., tf.newaxis])),
        ],
        axis=-1)
    # Sum both terms over components and over the two stacked summands.
    return tf.reduce_logsumexp(
        input_tensor=self.mixture_distribution.logits[..., tf.newaxis] + v,
        axis=[-2, -1])
def concat_vectors(*args):
  """Concatenate vectors, folding at graph-build time when possible.

  If every argument has a statically-known value, returns a plain Python
  list of the concatenated elements; otherwise falls back to `tf.concat`.
  """
  static_vals = [tf.get_static_value(v) for v in args]
  if all(v is not None for v in static_vals):
    # Every input is statically known: flatten into a Python list.
    flattened = []
    for vec in static_vals:
      flattened.extend(vec)
    return flattened
  # At least one input is only known at graph runtime.
  return tf.concat(args, axis=0)
| 39.510539 | 80 | 0.684073 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow_probability.python.bijectors import exp as exp_bijector
from tensorflow_probability.python.distributions import categorical
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import normal
from tensorflow_probability.python.distributions import poisson
from tensorflow_probability.python.distributions import seed_stream
from tensorflow_probability.python.distributions import transformed_distribution
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
__all__ = [
"PoissonLogNormalQuadratureCompound",
"quadrature_scheme_lognormal_gauss_hermite",
"quadrature_scheme_lognormal_quantiles",
]
def quadrature_scheme_lognormal_gauss_hermite(
loc, scale, quadrature_size,
validate_args=False, name=None):
with tf.name_scope(name, "vector_diffeomixture_quadrature_gauss_hermite",
[loc, scale]):
grid, probs = np.polynomial.hermite.hermgauss(deg=quadrature_size)
grid = grid.astype(loc.dtype.as_numpy_dtype)
probs = probs.astype(loc.dtype.as_numpy_dtype)
probs /= np.linalg.norm(probs, ord=1, keepdims=True)
probs = tf.convert_to_tensor(value=probs, name="probs", dtype=loc.dtype)
grid = (loc[..., tf.newaxis] + np.sqrt(2.) * scale[..., tf.newaxis] * grid)
return grid, probs
def quadrature_scheme_lognormal_quantiles(
loc, scale, quadrature_size,
validate_args=False, name=None):
with tf.name_scope(name, "quadrature_scheme_lognormal_quantiles",
[loc, scale]):
dist = transformed_distribution.TransformedDistribution(
distribution=normal.Normal(loc=loc, scale=scale),
bijector=exp_bijector.Exp(),
validate_args=validate_args)
batch_ndims = dist.batch_shape.ndims
if batch_ndims is None:
batch_ndims = tf.shape(input=dist.batch_shape_tensor())[0]
def _compute_quantiles():
zero = tf.zeros([], dtype=dist.dtype)
edges = tf.linspace(zero, 1., quadrature_size + 3)[1:-1]
edges = tf.reshape(
edges,
shape=tf.concat(
[[-1], tf.ones([batch_ndims], dtype=tf.int32)], axis=0))
quantiles = dist.quantile(edges)
perm = tf.concat([tf.range(1, 1 + batch_ndims), [0]], axis=0)
quantiles = tf.transpose(a=quantiles, perm=perm)
return quantiles
quantiles = _compute_quantiles()
grid = (quantiles[..., :-1] + quantiles[..., 1:]) / 2.
grid.set_shape(dist.batch_shape.concatenate([quadrature_size]))
probs = tf.fill(
dims=[quadrature_size], value=1. / tf.cast(quadrature_size, dist.dtype))
return grid, probs
class PoissonLogNormalQuadratureCompound(distribution.Distribution):
def __init__(self,
loc,
scale,
quadrature_size=8,
quadrature_fn=quadrature_scheme_lognormal_quantiles,
validate_args=False,
allow_nan_stats=True,
name="PoissonLogNormalQuadratureCompound"):
parameters = dict(locals())
with tf.name_scope(name, values=[loc, scale]) as name:
dtype = dtype_util.common_dtype([loc, scale], tf.float32)
if loc is not None:
loc = tf.convert_to_tensor(value=loc, name="loc", dtype=dtype)
if scale is not None:
scale = tf.convert_to_tensor(value=scale, dtype=dtype, name="scale")
self._quadrature_grid, self._quadrature_probs = tuple(quadrature_fn(
loc, scale, quadrature_size, validate_args))
dt = self._quadrature_grid.dtype
if dt.base_dtype != self._quadrature_probs.dtype.base_dtype:
raise TypeError("Quadrature grid dtype ({}) does not match quadrature "
"probs dtype ({}).".format(
dt.name, self._quadrature_probs.dtype.name))
self._distribution = poisson.Poisson(
log_rate=self._quadrature_grid,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats)
self._mixture_distribution = categorical.Categorical(
logits=tf.math.log(self._quadrature_probs),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats)
self._loc = loc
self._scale = scale
self._quadrature_size = quadrature_size
super(PoissonLogNormalQuadratureCompound, self).__init__(
dtype=dt,
reparameterization_type=reparameterization.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=[loc, scale],
name=name)
@property
def mixture_distribution(self):
return self._mixture_distribution
@property
def distribution(self):
return self._distribution
@property
def loc(self):
return self._loc
@property
def scale(self):
return self._scale
@property
def quadrature_size(self):
return self._quadrature_size
def _batch_shape_tensor(self):
return tf.broadcast_dynamic_shape(
self.distribution.batch_shape_tensor(),
tf.shape(input=self.mixture_distribution.logits))[:-1]
def _batch_shape(self):
return tf.broadcast_static_shape(
self.distribution.batch_shape,
self.mixture_distribution.logits.shape)[:-1]
def _event_shape(self):
return tf.TensorShape([])
def _sample_n(self, n, seed=None):
batch_size = self.batch_shape.num_elements()
if batch_size is None:
batch_size = tf.reduce_prod(input_tensor=self.batch_shape_tensor())
# already specify a probs vector for each batch coordinate.
# We only support this kind of reduced broadcasting, i.e., there is exactly
# one probs vector for all batch dims or one for each.
stream = seed_stream.SeedStream(
seed, salt="PoissonLogNormalQuadratureCompound")
ids = self._mixture_distribution.sample(
sample_shape=concat_vectors(
[n],
distribution_util.pick_vector(
self.mixture_distribution.is_scalar_batch(),
[batch_size],
np.int32([]))),
seed=stream())
# We need to flatten batch dims in case mixture_distribution has its own
# batch dims.
ids = tf.reshape(
ids,
shape=concat_vectors([n],
distribution_util.pick_vector(
self.is_scalar_batch(), np.int32([]),
np.int32([-1]))))
# Stride `quadrature_size` for `batch_size` number of times.
offset = tf.range(
start=0,
limit=batch_size * self._quadrature_size,
delta=self._quadrature_size,
dtype=ids.dtype)
ids += offset
rate = tf.gather(tf.reshape(self.distribution.rate, shape=[-1]), ids)
rate = tf.reshape(
rate, shape=concat_vectors([n], self.batch_shape_tensor()))
return tf.random.poisson(lam=rate, shape=[], dtype=self.dtype, seed=seed)
def _log_prob(self, x):
return tf.reduce_logsumexp(
input_tensor=(self.mixture_distribution.logits +
self.distribution.log_prob(x[..., tf.newaxis])),
axis=-1)
def _mean(self):
return tf.exp(
tf.reduce_logsumexp(
input_tensor=self.mixture_distribution.logits +
self.distribution.log_rate,
axis=-1))
def _variance(self):
return tf.exp(self._log_variance())
def _stddev(self):
return tf.exp(0.5 * self._log_variance())
def _log_variance(self):
# Following calculation is based on law of total variance:
#
# Var[Z] = E[Var[Z | V]] + Var[E[Z | V]]
#
# where,
#
# Z|v ~ interpolate_affine[v](distribution)
# V ~ mixture_distribution
#
# thus,
#
# E[Var[Z | V]] = sum{ prob[d] Var[d] : d=0, ..., deg-1 }
# Var[E[Z | V]] = sum{ prob[d] (Mean[d] - Mean)**2 : d=0, ..., deg-1 }
v = tf.stack(
[
# log(self.distribution.variance()) = log(Var[d]) = log(rate[d])
self.distribution.log_rate,
# log((Mean[d] - Mean)**2)
2. * tf.math.log(
tf.abs(self.distribution.mean() -
self._mean()[..., tf.newaxis])),
],
axis=-1)
return tf.reduce_logsumexp(
input_tensor=self.mixture_distribution.logits[..., tf.newaxis] + v,
axis=[-2, -1])
def concat_vectors(*args):
args_ = [tf.get_static_value(x) for x in args]
if any(vec is None for vec in args_):
return tf.concat(args, axis=0)
return [val for vec in args_ for val in vec]
| true | true |
f72fc09102b906fd3f59703976525e7e5cd9e483 | 2,338 | py | Python | tests/test_paramark.py | mrzechonek/pytest-paramark | 2c899e200eb0d68e66cd4e32e46c9cdd396845ec | [
"MIT"
] | 1 | 2021-12-23T11:21:16.000Z | 2021-12-23T11:21:16.000Z | tests/test_paramark.py | mrzechonek/pytest-paramark | 2c899e200eb0d68e66cd4e32e46c9cdd396845ec | [
"MIT"
] | null | null | null | tests/test_paramark.py | mrzechonek/pytest-paramark | 2c899e200eb0d68e66cd4e32e46c9cdd396845ec | [
"MIT"
] | null | null | null | from namedlist import namedlist
import pytest
# fmt: off
@pytest.fixture(indirect=True)
def foo(request):
    """Configurable fixture; fields overridable via 'foo.<option>' params.

    NOTE(review): `indirect=True` appears to be consumed by the
    pytest-paramark plugin under test; plain pytest.fixture does not
    define this keyword -- confirm against the plugin implementation.
    """
    Foo = namedlist('Foo', (
        ('some_option', 42),
        ('another_option', 'test'),
    ))
    return Foo(**request.param)
@pytest.fixture(indirect=True)
def bar(request):
    """Second configurable fixture, with boolean defaults."""
    Bar = namedlist('Bar', (
        ('some_option', True),
        ('another_option', False),
    ))
    return Bar(**request.param)
def test_default(foo, bar):
    # With no parametrization, fixtures keep their declared defaults.
    assert foo.some_option == 42
    assert foo.another_option == 'test'
    assert bar.some_option is True
    assert bar.another_option is False
@pytest.mark.parametrize(
    ('foo.some_option', 'foo_plus_three',),
    [
        (1, 4),
        (7, 10),
    ],
)
def test_fixture_and_argument(foo, foo_plus_three):
    # A dotted fixture param can be mixed with an ordinary test argument.
    assert foo.some_option + 3 == foo_plus_three
@pytest.mark.parametrize(
    ('foo.some_option', 'bar.some_option',),
    [
        (5, 5),
        (3, 7),
    ]
)
def test_two_fixtures(foo, bar):
    # Options of two different fixtures parametrized in one call.
    assert foo.some_option + bar.some_option == 10
@pytest.mark.parametrize(
    'foo.some_option',
    [
        0x420,
    ]
)
@pytest.mark.parametrize(
    'foo.another_option',
    [
        5,
        6,
    ]
)
def test_parametrized_nesting(request, foo):
    # Stacked parametrize decorators on the same fixture combine.
    assert foo.some_option == 0x420
    assert foo.another_option in (5, 6)
@pytest.mark.parametrize(
    'foo.*',
    [
        dict(some_option=0x420),
    ]
)
def test_indirect(request, foo):
    # 'fixture.*' passes a whole dict of options at once.
    assert foo.some_option == 0x420
@pytest.mark.parametrize(
    ('foo.some_option', 'qux', 'bar.another_option'),
    [
        (0x420, 'qux', 5),
    ]
)
def test_parametrized_mixed(foo, bar, qux):
    # Two fixtures plus a plain argument in a single parametrize call.
    assert foo.some_option == 0x420
    assert bar.another_option == 5
    assert qux == 'qux'
@pytest.mark.foo(some_option=24, another_option='five')
def test_shortcut(foo, bar):
    # @pytest.mark.<fixture>(...) is shorthand for parametrizing that
    # fixture; unrelated fixtures keep their defaults.
    assert foo.some_option == 24
    assert foo.another_option == 'five'
    assert bar.some_option is True
    assert bar.another_option is False
@pytest.mark.parametrize('foo.some_option', [3])
@pytest.mark.parametrize('foo.some_option', [1])
@pytest.mark.parametrize('foo.some_option', [2])
def test_closest(foo):
    # When the same option is parametrized repeatedly, the decorator
    # closest to the function (applied last: [2]) wins.
    assert foo.some_option == 2
@pytest.mark.foo(some_option=3)
@pytest.mark.foo(some_option=1)
@pytest.mark.foo(some_option=2)
def test_closest_shortcut(foo):
    # Same closest-wins rule for the mark shortcut form.
    assert foo.some_option == 2
| 20.155172 | 55 | 0.641574 | from namedlist import namedlist
import pytest
@pytest.fixture(indirect=True)
def foo(request):
Foo = namedlist('Foo', (
('some_option', 42),
('another_option', 'test'),
))
return Foo(**request.param)
@pytest.fixture(indirect=True)
def bar(request):
Bar = namedlist('Bar', (
('some_option', True),
('another_option', False),
))
return Bar(**request.param)
def test_default(foo, bar):
assert foo.some_option == 42
assert foo.another_option == 'test'
assert bar.some_option is True
assert bar.another_option is False
@pytest.mark.parametrize(
('foo.some_option', 'foo_plus_three',),
[
(1, 4),
(7, 10),
],
)
def test_fixture_and_argument(foo, foo_plus_three):
assert foo.some_option + 3 == foo_plus_three
@pytest.mark.parametrize(
('foo.some_option', 'bar.some_option',),
[
(5, 5),
(3, 7),
]
)
def test_two_fixtures(foo, bar):
assert foo.some_option + bar.some_option == 10
@pytest.mark.parametrize(
'foo.some_option',
[
0x420,
]
)
@pytest.mark.parametrize(
'foo.another_option',
[
5,
6,
]
)
def test_parametrized_nesting(request, foo):
assert foo.some_option == 0x420
assert foo.another_option in (5, 6)
@pytest.mark.parametrize(
'foo.*',
[
dict(some_option=0x420),
]
)
def test_indirect(request, foo):
assert foo.some_option == 0x420
@pytest.mark.parametrize(
('foo.some_option', 'qux', 'bar.another_option'),
[
(0x420, 'qux', 5),
]
)
def test_parametrized_mixed(foo, bar, qux):
assert foo.some_option == 0x420
assert bar.another_option == 5
assert qux == 'qux'
@pytest.mark.foo(some_option=24, another_option='five')
def test_shortcut(foo, bar):
assert foo.some_option == 24
assert foo.another_option == 'five'
assert bar.some_option is True
assert bar.another_option is False
@pytest.mark.parametrize('foo.some_option', [3])
@pytest.mark.parametrize('foo.some_option', [1])
@pytest.mark.parametrize('foo.some_option', [2])
def test_closest(foo):
assert foo.some_option == 2
@pytest.mark.foo(some_option=3)
@pytest.mark.foo(some_option=1)
@pytest.mark.foo(some_option=2)
def test_closest_shortcut(foo):
assert foo.some_option == 2
| true | true |
f72fc0b2e52b6be3a20c325a24aba237a4e6319d | 1,372 | py | Python | bigtable/hello_happybase/main_test.py | thesugar/python-docs-samples | 1a59ca688f1d7602d52cd4088fa7b6e3afe0afd0 | [
"Apache-2.0"
] | 34 | 2020-07-27T19:14:01.000Z | 2022-03-31T14:46:53.000Z | bigtable/hello_happybase/main_test.py | thesugar/python-docs-samples | 1a59ca688f1d7602d52cd4088fa7b6e3afe0afd0 | [
"Apache-2.0"
] | 254 | 2020-01-31T23:44:06.000Z | 2022-03-23T22:52:49.000Z | bigtable/hello_happybase/main_test.py | thesugar/python-docs-samples | 1a59ca688f1d7602d52cd4088fa7b6e3afe0afd0 | [
"Apache-2.0"
] | 30 | 2020-01-31T20:45:34.000Z | 2022-03-23T19:56:42.000Z | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
from main import main
# Integration-test configuration comes from the environment; a KeyError
# here means the required env vars were not set by the test harness.
PROJECT = os.environ['GOOGLE_CLOUD_PROJECT']
BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE']
# Randomized table names avoid collisions between concurrent test runs.
TABLE_NAME_FORMAT = 'hello-world-hb-test-{}'
TABLE_NAME_RANGE = 10000
def test_main(capsys):
    """End-to-end smoke test: run the sample and verify its printed output."""
    table_name = TABLE_NAME_FORMAT.format(
        random.randrange(TABLE_NAME_RANGE))
    main(
        PROJECT,
        BIGTABLE_INSTANCE,
        table_name)
    out, _ = capsys.readouterr()
    # Each assertion corresponds to one stage the sample prints as it runs.
    assert 'Creating the {} table.'.format(table_name) in out
    assert 'Writing some greetings to the table.' in out
    assert 'Getting a single greeting by row key.' in out
    assert 'Hello World!' in out
    assert 'Scanning for all greetings' in out
    assert 'Hello Cloud Bigtable!' in out
    assert 'Deleting the {} table.'.format(table_name) in out
import os
import random
from main import main
PROJECT = os.environ['GOOGLE_CLOUD_PROJECT']
BIGTABLE_INSTANCE = os.environ['BIGTABLE_INSTANCE']
TABLE_NAME_FORMAT = 'hello-world-hb-test-{}'
TABLE_NAME_RANGE = 10000
def test_main(capsys):
table_name = TABLE_NAME_FORMAT.format(
random.randrange(TABLE_NAME_RANGE))
main(
PROJECT,
BIGTABLE_INSTANCE,
table_name)
out, _ = capsys.readouterr()
assert 'Creating the {} table.'.format(table_name) in out
assert 'Writing some greetings to the table.' in out
assert 'Getting a single greeting by row key.' in out
assert 'Hello World!' in out
assert 'Scanning for all greetings' in out
assert 'Hello Cloud Bigtable!' in out
assert 'Deleting the {} table.'.format(table_name) in out
| true | true |
f72fc11fa59ceffe2e3f49244bef15eddabf9421 | 7,807 | py | Python | cloudcafe/compute/flavors_api/models/flavor.py | ProjectMeniscus/cloudcafe | fa8fd796b303f0c5f0d6e98b2b5d01f6ea8fefe9 | [
"Apache-2.0"
] | null | null | null | cloudcafe/compute/flavors_api/models/flavor.py | ProjectMeniscus/cloudcafe | fa8fd796b303f0c5f0d6e98b2b5d01f6ea8fefe9 | [
"Apache-2.0"
] | null | null | null | cloudcafe/compute/flavors_api/models/flavor.py | ProjectMeniscus/cloudcafe | fa8fd796b303f0c5f0d6e98b2b5d01f6ea8fefe9 | [
"Apache-2.0"
] | 1 | 2020-11-17T19:05:08.000Z | 2020-11-17T19:05:08.000Z | """
Copyright 2013 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import xml.etree.ElementTree as ET
from cafe.engine.models.base import AutoMarshallingModel
from cloudcafe.compute.common.equality_tools import EqualityTools
from cloudcafe.compute.common.constants import Constants
from cloudcafe.compute.common.models.link import Links
class CreateFlavor(AutoMarshallingModel):
    """Request model for the Create Flavor admin API call.

    Serializes the flavor attributes into the JSON request body expected
    by the compute service. XML serialization is not supported.
    """

    def __init__(self, name=None, ram=None, vcpus=None,
                 disk=None, id=None, is_public=None):
        """
        @param name: Display name of the new flavor
        @param ram: Memory in MB
        @param vcpus: Number of virtual CPUs
        @param disk: Disk size in GB
        @param id: Identifier to assign to the new flavor
        @param is_public: Whether the flavor is publicly visible
        """
        super(CreateFlavor, self).__init__()
        self.id = id
        self.name = name
        self.ram = ram
        self.disk = disk
        self.vcpus = vcpus
        self.is_public = is_public

    def _obj_to_json(self):
        """Return the JSON request body: {"flavor": {...}}."""
        return json.dumps({'flavor': self._obj_to_dict()})

    def _obj_to_dict(self):
        """Return the flavor attributes as a request dictionary.

        ram/disk/vcpus are cast to int so string inputs serialize as JSON
        numbers; as before, a None or non-numeric value raises here.
        """
        return {
            'id': self.id,
            'name': self.name,
            'ram': int(self.ram),
            'disk': int(self.disk),
            'vcpus': int(self.vcpus),
            'os-flavor-access:is_public': self.is_public,
        }

    @classmethod
    def _xml_to_obj(cls, serialized_str):
        # Bug fix: `raise NotImplemented` raises a TypeError because
        # NotImplemented is a constant, not an exception type.
        raise NotImplementedError

    @classmethod
    def _xml_list_to_obj(cls, xml_list):
        raise NotImplementedError
class Flavor(AutoMarshallingModel):
def __init__(self, id=None, name=None, ram=None, disk=None, vcpus=None,
swap=None, rxtx_factor=None, links=None):
"""
An object that represents a flavor.
"""
self.id = id
self.name = name
self.ram = ram
self.disk = disk
self.vcpus = vcpus
self.links = links
def __repr__(self):
values = []
for prop in self.__dict__:
values.append("%s: %s" % (prop, self.__dict__[prop]))
return '[' + ', '.join(values) + ']'
@classmethod
def _json_to_obj(cls, serialized_str):
"""
Returns an instance of a Flavor based on the json serialized_str
passed in.
"""
json_dict = json.loads(serialized_str)
if 'flavor' in json_dict.keys():
flavor = cls._dict_to_obj(json_dict['flavor'])
return flavor
if 'flavors' in json_dict.keys():
flavors = []
for flavor_dict in json_dict['flavors']:
flavor = cls._dict_to_obj(flavor_dict)
flavors.append(flavor)
return flavors
    @classmethod
    def _dict_to_obj(cls, flavor_dict):
        """Helper method to turn dictionary into Flavor instance."""
        flavor = Flavor(id=flavor_dict.get('id'),
                        name=flavor_dict.get('name'),
                        ram=flavor_dict.get('ram'),
                        disk=flavor_dict.get('disk'),
                        vcpus=flavor_dict.get('vcpus'))
        # 'links' is accessed directly (not via .get), so a payload without a
        # links element raises KeyError; swap/rxtx_factor only appear in the
        # XML deserialization path.
        flavor.links = Links._dict_to_obj(flavor_dict['links'])
        return flavor
@classmethod
def _xml_to_obj(cls, serialized_str):
"""
Returns an instance of a Flavor based on the xml serialized_str
passed in.
"""
element = ET.fromstring(serialized_str)
cls._remove_xml_etree_namespace(element, Constants.XML_API_NAMESPACE)
cls._remove_xml_etree_namespace(element,
Constants.XML_API_ATOM_NAMESPACE)
if element.tag == 'flavor':
flavor = cls._xml_ele_to_obj(element)
return flavor
if element.tag == 'flavors':
flavors = []
for flavor in element.findall('flavor'):
flavor = cls._xml_ele_to_obj(flavor)
flavors.append(flavor)
return flavors
@classmethod
def _xml_ele_to_obj(cls, element):
"""Helper method to turn ElementTree instance to Flavor instance."""
flavor_dict = element.attrib
if 'vcpus' in flavor_dict:
flavor_dict['vcpus'] = (flavor_dict.get('vcpus') and
int(flavor_dict.get('vcpus')))
if 'disk' in flavor_dict:
flavor_dict['disk'] = (flavor_dict.get('disk') and
int(flavor_dict.get('disk')))
if 'rxtx_factor' in flavor_dict:
flavor_dict['rxtx_factor'] = flavor_dict.get('rxtx_factor') \
and float(flavor_dict.get('rxtx_factor'))
if 'ram' in flavor_dict:
flavor_dict['ram'] = flavor_dict.get('ram') \
and int(flavor_dict.get('ram'))
if 'swap' in flavor_dict:
flavor_dict['swap'] = flavor_dict.get('swap') \
and int(flavor_dict.get('swap'))
links = Links._xml_ele_to_obj(element)
flavor = Flavor(flavor_dict.get('id'), flavor_dict.get('name'),
flavor_dict.get('ram'), flavor_dict.get('disk'),
flavor_dict.get('vcpus'), flavor_dict.get('swap'),
flavor_dict.get('rxtx_factor'), links)
return flavor
def __eq__(self, other):
"""
@summary: Overrides the default equals
@param other: Flavor object to compare with
@type other: Flavor
@return: True if Flavor objects are equal, False otherwise
@rtype: bool
"""
return EqualityTools.are_objects_equal(self, other, ['links'])
def __ne__(self, other):
"""
@summary: Overrides the default not-equals
@param other: Flavor object to compare with
@type other: Flavor
@return: True if Flavor objects are not equal, False otherwise
@rtype: bool
"""
return not self == other
class FlavorMin(Flavor):
    """Minimum-detail flavor representation: id, name and links only."""

    def __init__(self, **kwargs):
        """Accept the minimal attributes (id, name, links) as keywords."""
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)

    def __eq__(self, other):
        """Equality comparison that ignores the links collection."""
        return EqualityTools.are_objects_equal(self, other, ['links'])

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other

    @classmethod
    def _xml_ele_to_obj(cls, element):
        """Build a FlavorMin from an ElementTree element."""
        attrs = element.attrib
        minimal = FlavorMin(id=attrs.get('id'), name=attrs.get('name'))
        minimal.links = Links._xml_ele_to_obj(element)
        return minimal

    @classmethod
    def _dict_to_obj(cls, flavor_dict):
        """Build a FlavorMin from a deserialized JSON dictionary."""
        minimal = FlavorMin(id=flavor_dict.get('id'),
                            name=flavor_dict.get('name'))
        minimal.links = Links._dict_to_obj(flavor_dict['links'])
        return minimal
| 34.39207 | 77 | 0.599846 |
import json
import xml.etree.ElementTree as ET
from cafe.engine.models.base import AutoMarshallingModel
from cloudcafe.compute.common.equality_tools import EqualityTools
from cloudcafe.compute.common.constants import Constants
from cloudcafe.compute.common.models.link import Links
class CreateFlavor(AutoMarshallingModel):
def __init__(self, name=None, ram=None, vcpus=None,
disk=None, id=None, is_public=None):
super(CreateFlavor, self).__init__()
self.id = id
self.name = name
self.ram = ram
self.disk = disk
self.vcpus = vcpus
self.is_public = is_public
def _obj_to_json(self):
ret = {'flavor': self._obj_to_dict()}
return json.dumps(ret)
def _obj_to_dict(self):
ret = {}
ret['id'] = self.id
ret['name'] = self.name
ret['ram'] = int(self.ram)
ret['disk'] = int(self.disk)
ret['vcpus'] = int(self.vcpus)
ret['os-flavor-access:is_public'] = self.is_public
return ret
@classmethod
def _xml_to_obj(cls, serialized_str):
raise NotImplemented
@classmethod
def _xml_list_to_obj(cls, xml_list):
raise NotImplemented
class Flavor(AutoMarshallingModel):
def __init__(self, id=None, name=None, ram=None, disk=None, vcpus=None,
swap=None, rxtx_factor=None, links=None):
self.id = id
self.name = name
self.ram = ram
self.disk = disk
self.vcpus = vcpus
self.links = links
def __repr__(self):
values = []
for prop in self.__dict__:
values.append("%s: %s" % (prop, self.__dict__[prop]))
return '[' + ', '.join(values) + ']'
@classmethod
def _json_to_obj(cls, serialized_str):
json_dict = json.loads(serialized_str)
if 'flavor' in json_dict.keys():
flavor = cls._dict_to_obj(json_dict['flavor'])
return flavor
if 'flavors' in json_dict.keys():
flavors = []
for flavor_dict in json_dict['flavors']:
flavor = cls._dict_to_obj(flavor_dict)
flavors.append(flavor)
return flavors
@classmethod
def _dict_to_obj(cls, flavor_dict):
flavor = Flavor(id=flavor_dict.get('id'),
name=flavor_dict.get('name'),
ram=flavor_dict.get('ram'),
disk=flavor_dict.get('disk'),
vcpus=flavor_dict.get('vcpus'))
flavor.links = Links._dict_to_obj(flavor_dict['links'])
return flavor
@classmethod
def _xml_to_obj(cls, serialized_str):
element = ET.fromstring(serialized_str)
cls._remove_xml_etree_namespace(element, Constants.XML_API_NAMESPACE)
cls._remove_xml_etree_namespace(element,
Constants.XML_API_ATOM_NAMESPACE)
if element.tag == 'flavor':
flavor = cls._xml_ele_to_obj(element)
return flavor
if element.tag == 'flavors':
flavors = []
for flavor in element.findall('flavor'):
flavor = cls._xml_ele_to_obj(flavor)
flavors.append(flavor)
return flavors
@classmethod
def _xml_ele_to_obj(cls, element):
flavor_dict = element.attrib
if 'vcpus' in flavor_dict:
flavor_dict['vcpus'] = (flavor_dict.get('vcpus') and
int(flavor_dict.get('vcpus')))
if 'disk' in flavor_dict:
flavor_dict['disk'] = (flavor_dict.get('disk') and
int(flavor_dict.get('disk')))
if 'rxtx_factor' in flavor_dict:
flavor_dict['rxtx_factor'] = flavor_dict.get('rxtx_factor') \
and float(flavor_dict.get('rxtx_factor'))
if 'ram' in flavor_dict:
flavor_dict['ram'] = flavor_dict.get('ram') \
and int(flavor_dict.get('ram'))
if 'swap' in flavor_dict:
flavor_dict['swap'] = flavor_dict.get('swap') \
and int(flavor_dict.get('swap'))
links = Links._xml_ele_to_obj(element)
flavor = Flavor(flavor_dict.get('id'), flavor_dict.get('name'),
flavor_dict.get('ram'), flavor_dict.get('disk'),
flavor_dict.get('vcpus'), flavor_dict.get('swap'),
flavor_dict.get('rxtx_factor'), links)
return flavor
def __eq__(self, other):
return EqualityTools.are_objects_equal(self, other, ['links'])
def __ne__(self, other):
return not self == other
class FlavorMin(Flavor):
def __init__(self, **kwargs):
for keys, values in kwargs.items():
setattr(self, keys, values)
def __eq__(self, other):
return EqualityTools.are_objects_equal(self, other, ['links'])
def __ne__(self, other):
return not self == other
@classmethod
def _xml_ele_to_obj(cls, element):
flavor_dict = element.attrib
flavor_min = FlavorMin(id=flavor_dict.get('id'),
name=flavor_dict.get('name'))
flavor_min.links = Links._xml_ele_to_obj(element)
return flavor_min
@classmethod
def _dict_to_obj(cls, flavor_dict):
flavor_min = FlavorMin(id=flavor_dict.get('id'),
name=flavor_dict.get('name'))
flavor_min.links = Links._dict_to_obj(flavor_dict['links'])
return flavor_min
| true | true |
f72fc1298f0c5130bd5594ba286c250a4a144484 | 524 | py | Python | build/srslib_test/catkin_generated/pkg.develspace.context.pc.py | 6RiverSystems/darknet_ros | 03c72b96afa99f7cc75f7792b51deb4a7f4ed379 | [
"BSD-3-Clause"
] | null | null | null | build/srslib_test/catkin_generated/pkg.develspace.context.pc.py | 6RiverSystems/darknet_ros | 03c72b96afa99f7cc75f7792b51deb4a7f4ed379 | [
"BSD-3-Clause"
] | null | null | null | build/srslib_test/catkin_generated/pkg.develspace.context.pc.py | 6RiverSystems/darknet_ros | 03c72b96afa99f7cc75f7792b51deb4a7f4ed379 | [
"BSD-3-Clause"
] | null | null | null | # generated from catkin/cmake/template/pkg.context.pc.in
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""

# Raw semicolon-separated values baked in by the catkin template.
_INCLUDE_DIRS = "/home/kalyco/mfp_workspace/src/srslib_test/include"
_LIBRARIES = "-lsrslib_test"

PROJECT_PKG_CONFIG_INCLUDE_DIRS = _INCLUDE_DIRS.split(';') if _INCLUDE_DIRS != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = _LIBRARIES.split(';') if _LIBRARIES != "" else []
PROJECT_NAME = "srslib_test"
PROJECT_SPACE_DIR = "/home/kalyco/mfp_workspace/devel/.private/srslib_test"
PROJECT_VERSION = "1.0.0"
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/kalyco/mfp_workspace/src/srslib_test/include".split(';') if "/home/kalyco/mfp_workspace/src/srslib_test/include" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lsrslib_test".split(';') if "-lsrslib_test" != "" else []
PROJECT_NAME = "srslib_test"
PROJECT_SPACE_DIR = "/home/kalyco/mfp_workspace/devel/.private/srslib_test"
PROJECT_VERSION = "1.0.0"
| true | true |
f72fc1629529690ac89591087b2b8586758a03f9 | 1,138 | py | Python | scripts/package.py | ralfonso/theory | 41684969313cfc545d74b306e409fd5bf21387b3 | [
"MIT"
] | 4 | 2015-07-03T19:53:59.000Z | 2016-04-25T03:03:56.000Z | scripts/package.py | ralfonso/theory | 41684969313cfc545d74b306e409fd5bf21387b3 | [
"MIT"
] | null | null | null | scripts/package.py | ralfonso/theory | 41684969313cfc545d74b306e409fd5bf21387b3 | [
"MIT"
] | 2 | 2020-03-29T22:02:29.000Z | 2021-07-13T07:17:19.000Z | #!/usr/bin/env python
import os
import shutil
import sys
import subprocess
import cairo
def main():
    """Build the theory-<version> release tarball next to the app root."""
    version = '0.1.11'
    # The script lives three directory levels below the application root.
    script_path = os.path.abspath(sys.argv[0])
    app_path = os.sep.join(script_path.split(os.sep)[:-3])

    src = os.path.join(app_path, 'theory')
    dest = os.path.join(app_path, "theory-%s" % version)
    tar_file = os.path.join(app_path, "theory-%s.tar.bz2" % version)
    exclude_file = os.path.join(src, "tar_exclude")

    # Start from a clean destination; a missing directory is fine.
    try:
        shutil.rmtree(dest)
    except OSError:
        pass
    shutil.copytree(src, dest)

    # Render the versioned logo into the copied tree.
    imgpath = os.path.join(dest, 'theory', 'public', 'img', 'theory-logo.png')
    logo_exec = os.path.join(app_path, 'theory', 'scripts', 'draw_theory_logo.py')
    subprocess.call([logo_exec, version, imgpath])

    # Create the compressed archive from the application root.
    os.chdir(app_path)
    subprocess.call(["tar", "jcvf", tar_file,
                     "--exclude-from=%s" % exclude_file,
                     "--exclude-vcs",
                     "theory-%s" % version])
def exclude_check(f):
    """Debug hook that logs a path being considered for tar exclusion."""
    # Bug fix: use the print() function form so the module also parses
    # under Python 3 (the original used the Python 2 print statement).
    print("check_exclude: %s" % f)
# Script entry point: build the release tarball when executed directly.
if __name__ == "__main__":
    main()
| 23.708333 | 107 | 0.654657 |
import os
import shutil
import sys
import subprocess
import cairo
def main():
version = '0.1.11'
script_location = sys.argv[0]
script_path = os.path.abspath(script_location)
app_path = os.sep.join(script_path.split(os.sep)[:-3])
src = os.path.join(app_path,'theory')
dest = os.path.join(app_path,"theory-%s" % version)
tar_file = os.path.join(app_path,"theory-%s.tar.bz2" % version)
exclude_file = os.path.join(src,"tar_exclude")
try:
shutil.rmtree(dest)
except OSError:
pass
shutil.copytree(src,dest)
imgpath = os.path.join(dest,'theory','public','img','theory-logo.png')
logo_exec = os.path.join(app_path,'theory','scripts','draw_theory_logo.py')
args = [logo_exec,version,imgpath]
subprocess.call(args)
os.chdir(app_path)
args = ["tar","jcvf",tar_file,"--exclude-from=%s" % exclude_file,"--exclude-vcs","theory-%s" % version]
subprocess.call(args)
def exclude_check(f):
print "check_exclude: %s" % f
if __name__ == "__main__":
main()
| false | true |
f72fc212dd0b3eb11cf3285fa9470daba40b1324 | 9,865 | py | Python | web3/providers/eth_tester/middleware.py | ayushkumar63123/web3.py | 4dda2db9d27a409f1a9c2b4a8ec917b53c51383f | [
"MIT"
] | 1 | 2022-03-19T03:49:34.000Z | 2022-03-19T03:49:34.000Z | web3/providers/eth_tester/middleware.py | ayushkumar63123/web3.py | 4dda2db9d27a409f1a9c2b4a8ec917b53c51383f | [
"MIT"
] | null | null | null | web3/providers/eth_tester/middleware.py | ayushkumar63123/web3.py | 4dda2db9d27a409f1a9c2b4a8ec917b53c51383f | [
"MIT"
] | 1 | 2021-11-12T00:38:42.000Z | 2021-11-12T00:38:42.000Z | import operator
from typing import (
TYPE_CHECKING,
Any,
Callable,
)
from eth_typing import (
ChecksumAddress,
)
from eth_utils import (
is_dict,
is_hex,
is_string,
)
from eth_utils.curried import (
apply_formatter_if,
apply_formatters_to_dict,
)
from eth_utils.toolz import (
assoc,
complement,
compose,
curry,
identity,
partial,
pipe,
)
from web3._utils.formatters import (
apply_formatter_to_array,
apply_formatters_to_args,
apply_key_map,
hex_to_integer,
integer_to_hex,
is_array_of_dicts,
static_return,
)
from web3.middleware import (
construct_formatting_middleware,
)
from web3.types import (
RPCEndpoint,
RPCResponse,
TxParams,
)
if TYPE_CHECKING:
from web3 import ( # noqa: F401
Web3,
)
def is_named_block(value: Any) -> bool:
    """Return True if ``value`` is a symbolic block identifier."""
    named_blocks = {"latest", "earliest", "pending"}
    return value in named_blocks
def is_hexstr(value: Any) -> bool:
    """Return True if ``value`` is a string containing a hex number."""
    if not is_string(value):
        return False
    return is_hex(value)
# Convert hex-string quantities to ints; eth-tester works with ints,
# while JSON-RPC encodes quantities as 0x-prefixed hex strings.
to_integer_if_hex = apply_formatter_if(is_hexstr, hex_to_integer)
is_not_named_block = complement(is_named_block)
# eth-tester result keys (snake_case) -> JSON-RPC keys (camelCase)
# for transaction objects returned to the caller.
TRANSACTION_KEY_MAPPINGS = {
    'access_list': 'accessList',
    'block_hash': 'blockHash',
    'block_number': 'blockNumber',
    'gas_price': 'gasPrice',
    'max_fee_per_gas': 'maxFeePerGas',
    'max_priority_fee_per_gas': 'maxPriorityFeePerGas',
    'transaction_hash': 'transactionHash',
    'transaction_index': 'transactionIndex',
}
transaction_key_remapper = apply_key_map(TRANSACTION_KEY_MAPPINGS)
# snake_case -> camelCase for log entries.
LOG_KEY_MAPPINGS = {
    'log_index': 'logIndex',
    'transaction_index': 'transactionIndex',
    'transaction_hash': 'transactionHash',
    'block_hash': 'blockHash',
    'block_number': 'blockNumber',
}
log_key_remapper = apply_key_map(LOG_KEY_MAPPINGS)
# snake_case -> camelCase for transaction receipts.
RECEIPT_KEY_MAPPINGS = {
    'block_hash': 'blockHash',
    'block_number': 'blockNumber',
    'contract_address': 'contractAddress',
    'gas_used': 'gasUsed',
    'cumulative_gas_used': 'cumulativeGasUsed',
    'effective_gas_price': 'effectiveGasPrice',
    'transaction_hash': 'transactionHash',
    'transaction_index': 'transactionIndex',
}
receipt_key_remapper = apply_key_map(RECEIPT_KEY_MAPPINGS)
# snake_case -> camelCase for block objects (note 'bloom' -> 'logsBloom').
BLOCK_KEY_MAPPINGS = {
    'gas_limit': 'gasLimit',
    'sha3_uncles': 'sha3Uncles',
    'transactions_root': 'transactionsRoot',
    'parent_hash': 'parentHash',
    'bloom': 'logsBloom',
    'state_root': 'stateRoot',
    'receipt_root': 'receiptsRoot',
    'total_difficulty': 'totalDifficulty',
    'extra_data': 'extraData',
    'gas_used': 'gasUsed',
    'base_fee_per_gas': 'baseFeePerGas',
}
block_key_remapper = apply_key_map(BLOCK_KEY_MAPPINGS)
# Inbound direction: camelCase request params -> snake_case for eth-tester.
TRANSACTION_PARAMS_MAPPING = {
    'gasPrice': 'gas_price',
    'maxFeePerGas': 'max_fee_per_gas',
    'maxPriorityFeePerGas': 'max_priority_fee_per_gas',
    'accessList': 'access_list',
}
transaction_params_remapper = apply_key_map(TRANSACTION_PARAMS_MAPPING)
# Numeric transaction params arrive as hex strings; convert them to ints.
REQUEST_TRANSACTION_FORMATTERS = {
    'gas': to_integer_if_hex,
    'gasPrice': to_integer_if_hex,
    'value': to_integer_if_hex,
    'nonce': to_integer_if_hex,
    'maxFeePerGas': to_integer_if_hex,
    'maxPriorityFeePerGas': to_integer_if_hex,
}
request_transaction_formatter = apply_formatters_to_dict(REQUEST_TRANSACTION_FORMATTERS)
# Filter params: remap block-range keys and convert hex block numbers.
FILTER_PARAMS_MAPPINGS = {
    'fromBlock': 'from_block',
    'toBlock': 'to_block',
}
filter_params_remapper = apply_key_map(FILTER_PARAMS_MAPPINGS)
FILTER_PARAMS_FORMATTERS = {
    'fromBlock': to_integer_if_hex,
    'toBlock': to_integer_if_hex,
}
filter_params_formatter = apply_formatters_to_dict(FILTER_PARAMS_FORMATTERS)
# Formatting must run before remapping (compose applies right-to-left).
filter_params_transformer = compose(filter_params_remapper, filter_params_formatter)
# Outbound transactions: normalize an empty 'to' field to None.
RESPONSE_TRANSACTION_FORMATTERS = {
    'to': apply_formatter_if(partial(operator.eq, ''), static_return(None)),
}
response_transaction_formatter = apply_formatters_to_dict(RESPONSE_TRANSACTION_FORMATTERS)
# Receipts carry embedded logs that need the same key remapping.
RECEIPT_FORMATTERS = {
    'logs': apply_formatter_to_array(log_key_remapper),
}
receipt_formatter = apply_formatters_to_dict(RECEIPT_FORMATTERS)
# Full inbound transaction-param pipeline: convert values, then remap keys.
transaction_params_transformer = compose(transaction_params_remapper, request_transaction_formatter)
# Middleware that translates between web3's JSON-RPC conventions
# (camelCase keys, hex-encoded quantities) and eth-tester's native
# conventions (snake_case keys, integer quantities), in both directions.
ethereum_tester_middleware = construct_formatting_middleware(
    request_formatters={
        # Eth
        RPCEndpoint('eth_getBlockByNumber'): apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        RPCEndpoint('eth_getFilterChanges'): apply_formatters_to_args(hex_to_integer),
        RPCEndpoint('eth_getFilterLogs'): apply_formatters_to_args(hex_to_integer),
        RPCEndpoint('eth_getBlockTransactionCountByNumber'): apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        RPCEndpoint('eth_getUncleCountByBlockNumber'): apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        RPCEndpoint('eth_getTransactionByBlockHashAndIndex'): apply_formatters_to_args(
            identity,
            to_integer_if_hex,
        ),
        RPCEndpoint('eth_getTransactionByBlockNumberAndIndex'): apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
            to_integer_if_hex,
        ),
        RPCEndpoint('eth_getUncleByBlockNumberAndIndex'): apply_formatters_to_args(
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
            to_integer_if_hex,
        ),
        RPCEndpoint('eth_newFilter'): apply_formatters_to_args(
            filter_params_transformer,
        ),
        RPCEndpoint('eth_getLogs'): apply_formatters_to_args(
            filter_params_transformer,
        ),
        RPCEndpoint('eth_sendTransaction'): apply_formatters_to_args(
            transaction_params_transformer,
        ),
        RPCEndpoint('eth_estimateGas'): apply_formatters_to_args(
            transaction_params_transformer,
        ),
        RPCEndpoint('eth_call'): apply_formatters_to_args(
            transaction_params_transformer,
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        RPCEndpoint('eth_uninstallFilter'): apply_formatters_to_args(hex_to_integer),
        RPCEndpoint('eth_getCode'): apply_formatters_to_args(
            identity,
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        RPCEndpoint('eth_getBalance'): apply_formatters_to_args(
            identity,
            apply_formatter_if(is_not_named_block, to_integer_if_hex),
        ),
        # EVM
        RPCEndpoint('evm_revert'): apply_formatters_to_args(hex_to_integer),
        # Personal
        RPCEndpoint('personal_sendTransaction'): apply_formatters_to_args(
            transaction_params_transformer,
            identity,
        ),
    },
    result_formatters={
        # Results come back with eth-tester's snake_case keys; remap them
        # (and normalize embedded logs) to the JSON-RPC camelCase shapes.
        RPCEndpoint('eth_getBlockByHash'): apply_formatter_if(
            is_dict,
            block_key_remapper,
        ),
        RPCEndpoint('eth_getBlockByNumber'): apply_formatter_if(
            is_dict,
            block_key_remapper,
        ),
        RPCEndpoint('eth_getBlockTransactionCountByHash'): apply_formatter_if(
            is_dict,
            transaction_key_remapper,
        ),
        RPCEndpoint('eth_getBlockTransactionCountByNumber'): apply_formatter_if(
            is_dict,
            transaction_key_remapper,
        ),
        RPCEndpoint('eth_getTransactionByHash'): apply_formatter_if(
            is_dict,
            compose(transaction_key_remapper, response_transaction_formatter),
        ),
        RPCEndpoint('eth_getTransactionReceipt'): apply_formatter_if(
            is_dict,
            compose(receipt_key_remapper, receipt_formatter),
        ),
        RPCEndpoint('eth_newFilter'): integer_to_hex,
        RPCEndpoint('eth_newBlockFilter'): integer_to_hex,
        RPCEndpoint('eth_newPendingTransactionFilter'): integer_to_hex,
        RPCEndpoint('eth_getLogs'): apply_formatter_if(
            is_array_of_dicts,
            apply_formatter_to_array(log_key_remapper),
        ),
        RPCEndpoint('eth_getFilterChanges'): apply_formatter_if(
            is_array_of_dicts,
            apply_formatter_to_array(log_key_remapper),
        ),
        RPCEndpoint('eth_getFilterLogs'): apply_formatter_if(
            is_array_of_dicts,
            apply_formatter_to_array(log_key_remapper),
        ),
        # EVM
        RPCEndpoint('evm_snapshot'): integer_to_hex,
    },
)
def guess_from(web3: "Web3", _: TxParams) -> ChecksumAddress:
    """Return a default ``from`` address for a transaction.

    Prefers the node's coinbase; falls back to the first local account,
    and returns None when neither is available.
    """
    coinbase = web3.eth.coinbase
    if coinbase is not None:
        return coinbase

    try:
        return web3.eth.accounts[0]
    except (IndexError, KeyError):
        # Bug fix: an empty ``accounts`` list raises IndexError, which the
        # original ``except KeyError`` never caught; either way there is no
        # account available to pre-fill, so carry on.
        pass

    return None
@curry
def fill_default(
    field: str, guess_func: Callable[..., Any], web3: "Web3", transaction: TxParams
) -> TxParams:
    """Return ``transaction`` with ``field`` filled in via ``guess_func``
    when it is missing or None; otherwise return it unchanged.
    """
    # type ignored b/c TxParams keys must be string literal types
    if transaction.get(field) is not None:  # type: ignore
        return transaction
    return assoc(transaction, field, guess_func(web3, transaction))
def default_transaction_fields_middleware(
    make_request: Callable[[RPCEndpoint, Any], Any], web3: "Web3"
) -> Callable[[RPCEndpoint, Any], RPCResponse]:
    """Middleware that injects a default ``from`` address into outgoing
    transaction-shaped requests before they reach eth-tester.
    """
    fill_default_from = fill_default('from', guess_from, web3)
    transaction_methods = ('eth_call', 'eth_estimateGas', 'eth_sendTransaction')

    def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
        if method not in transaction_methods:
            return make_request(method, params)
        # The transaction dict is always the first positional param.
        filled_transaction = fill_default_from(params[0])
        remaining_params = list(params)[1:]
        return make_request(method, [filled_transaction] + remaining_params)

    return middleware
| 31.119874 | 100 | 0.693259 | import operator
from typing import (
TYPE_CHECKING,
Any,
Callable,
)
from eth_typing import (
ChecksumAddress,
)
from eth_utils import (
is_dict,
is_hex,
is_string,
)
from eth_utils.curried import (
apply_formatter_if,
apply_formatters_to_dict,
)
from eth_utils.toolz import (
assoc,
complement,
compose,
curry,
identity,
partial,
pipe,
)
from web3._utils.formatters import (
apply_formatter_to_array,
apply_formatters_to_args,
apply_key_map,
hex_to_integer,
integer_to_hex,
is_array_of_dicts,
static_return,
)
from web3.middleware import (
construct_formatting_middleware,
)
from web3.types import (
RPCEndpoint,
RPCResponse,
TxParams,
)
if TYPE_CHECKING:
from web3 import (
Web3,
)
def is_named_block(value: Any) -> bool:
return value in {"latest", "earliest", "pending"}
def is_hexstr(value: Any) -> bool:
return is_string(value) and is_hex(value)
to_integer_if_hex = apply_formatter_if(is_hexstr, hex_to_integer)
is_not_named_block = complement(is_named_block)
TRANSACTION_KEY_MAPPINGS = {
'access_list': 'accessList',
'block_hash': 'blockHash',
'block_number': 'blockNumber',
'gas_price': 'gasPrice',
'max_fee_per_gas': 'maxFeePerGas',
'max_priority_fee_per_gas': 'maxPriorityFeePerGas',
'transaction_hash': 'transactionHash',
'transaction_index': 'transactionIndex',
}
transaction_key_remapper = apply_key_map(TRANSACTION_KEY_MAPPINGS)
LOG_KEY_MAPPINGS = {
'log_index': 'logIndex',
'transaction_index': 'transactionIndex',
'transaction_hash': 'transactionHash',
'block_hash': 'blockHash',
'block_number': 'blockNumber',
}
log_key_remapper = apply_key_map(LOG_KEY_MAPPINGS)
RECEIPT_KEY_MAPPINGS = {
'block_hash': 'blockHash',
'block_number': 'blockNumber',
'contract_address': 'contractAddress',
'gas_used': 'gasUsed',
'cumulative_gas_used': 'cumulativeGasUsed',
'effective_gas_price': 'effectiveGasPrice',
'transaction_hash': 'transactionHash',
'transaction_index': 'transactionIndex',
}
receipt_key_remapper = apply_key_map(RECEIPT_KEY_MAPPINGS)
BLOCK_KEY_MAPPINGS = {
'gas_limit': 'gasLimit',
'sha3_uncles': 'sha3Uncles',
'transactions_root': 'transactionsRoot',
'parent_hash': 'parentHash',
'bloom': 'logsBloom',
'state_root': 'stateRoot',
'receipt_root': 'receiptsRoot',
'total_difficulty': 'totalDifficulty',
'extra_data': 'extraData',
'gas_used': 'gasUsed',
'base_fee_per_gas': 'baseFeePerGas',
}
block_key_remapper = apply_key_map(BLOCK_KEY_MAPPINGS)
TRANSACTION_PARAMS_MAPPING = {
'gasPrice': 'gas_price',
'maxFeePerGas': 'max_fee_per_gas',
'maxPriorityFeePerGas': 'max_priority_fee_per_gas',
'accessList': 'access_list',
}
transaction_params_remapper = apply_key_map(TRANSACTION_PARAMS_MAPPING)
REQUEST_TRANSACTION_FORMATTERS = {
'gas': to_integer_if_hex,
'gasPrice': to_integer_if_hex,
'value': to_integer_if_hex,
'nonce': to_integer_if_hex,
'maxFeePerGas': to_integer_if_hex,
'maxPriorityFeePerGas': to_integer_if_hex,
}
request_transaction_formatter = apply_formatters_to_dict(REQUEST_TRANSACTION_FORMATTERS)
FILTER_PARAMS_MAPPINGS = {
'fromBlock': 'from_block',
'toBlock': 'to_block',
}
filter_params_remapper = apply_key_map(FILTER_PARAMS_MAPPINGS)
FILTER_PARAMS_FORMATTERS = {
'fromBlock': to_integer_if_hex,
'toBlock': to_integer_if_hex,
}
filter_params_formatter = apply_formatters_to_dict(FILTER_PARAMS_FORMATTERS)
filter_params_transformer = compose(filter_params_remapper, filter_params_formatter)
RESPONSE_TRANSACTION_FORMATTERS = {
'to': apply_formatter_if(partial(operator.eq, ''), static_return(None)),
}
response_transaction_formatter = apply_formatters_to_dict(RESPONSE_TRANSACTION_FORMATTERS)
RECEIPT_FORMATTERS = {
'logs': apply_formatter_to_array(log_key_remapper),
}
receipt_formatter = apply_formatters_to_dict(RECEIPT_FORMATTERS)
transaction_params_transformer = compose(transaction_params_remapper, request_transaction_formatter)
ethereum_tester_middleware = construct_formatting_middleware(
request_formatters={
RPCEndpoint('eth_getBlockByNumber'): apply_formatters_to_args(
apply_formatter_if(is_not_named_block, to_integer_if_hex),
),
RPCEndpoint('eth_getFilterChanges'): apply_formatters_to_args(hex_to_integer),
RPCEndpoint('eth_getFilterLogs'): apply_formatters_to_args(hex_to_integer),
RPCEndpoint('eth_getBlockTransactionCountByNumber'): apply_formatters_to_args(
apply_formatter_if(is_not_named_block, to_integer_if_hex),
),
RPCEndpoint('eth_getUncleCountByBlockNumber'): apply_formatters_to_args(
apply_formatter_if(is_not_named_block, to_integer_if_hex),
),
RPCEndpoint('eth_getTransactionByBlockHashAndIndex'): apply_formatters_to_args(
identity,
to_integer_if_hex,
),
RPCEndpoint('eth_getTransactionByBlockNumberAndIndex'): apply_formatters_to_args(
apply_formatter_if(is_not_named_block, to_integer_if_hex),
to_integer_if_hex,
),
RPCEndpoint('eth_getUncleByBlockNumberAndIndex'): apply_formatters_to_args(
apply_formatter_if(is_not_named_block, to_integer_if_hex),
to_integer_if_hex,
),
RPCEndpoint('eth_newFilter'): apply_formatters_to_args(
filter_params_transformer,
),
RPCEndpoint('eth_getLogs'): apply_formatters_to_args(
filter_params_transformer,
),
RPCEndpoint('eth_sendTransaction'): apply_formatters_to_args(
transaction_params_transformer,
),
RPCEndpoint('eth_estimateGas'): apply_formatters_to_args(
transaction_params_transformer,
),
RPCEndpoint('eth_call'): apply_formatters_to_args(
transaction_params_transformer,
apply_formatter_if(is_not_named_block, to_integer_if_hex),
),
RPCEndpoint('eth_uninstallFilter'): apply_formatters_to_args(hex_to_integer),
RPCEndpoint('eth_getCode'): apply_formatters_to_args(
identity,
apply_formatter_if(is_not_named_block, to_integer_if_hex),
),
RPCEndpoint('eth_getBalance'): apply_formatters_to_args(
identity,
apply_formatter_if(is_not_named_block, to_integer_if_hex),
),
RPCEndpoint('evm_revert'): apply_formatters_to_args(hex_to_integer),
RPCEndpoint('personal_sendTransaction'): apply_formatters_to_args(
transaction_params_transformer,
identity,
),
},
result_formatters={
RPCEndpoint('eth_getBlockByHash'): apply_formatter_if(
is_dict,
block_key_remapper,
),
RPCEndpoint('eth_getBlockByNumber'): apply_formatter_if(
is_dict,
block_key_remapper,
),
RPCEndpoint('eth_getBlockTransactionCountByHash'): apply_formatter_if(
is_dict,
transaction_key_remapper,
),
RPCEndpoint('eth_getBlockTransactionCountByNumber'): apply_formatter_if(
is_dict,
transaction_key_remapper,
),
RPCEndpoint('eth_getTransactionByHash'): apply_formatter_if(
is_dict,
compose(transaction_key_remapper, response_transaction_formatter),
),
RPCEndpoint('eth_getTransactionReceipt'): apply_formatter_if(
is_dict,
compose(receipt_key_remapper, receipt_formatter),
),
RPCEndpoint('eth_newFilter'): integer_to_hex,
RPCEndpoint('eth_newBlockFilter'): integer_to_hex,
RPCEndpoint('eth_newPendingTransactionFilter'): integer_to_hex,
RPCEndpoint('eth_getLogs'): apply_formatter_if(
is_array_of_dicts,
apply_formatter_to_array(log_key_remapper),
),
RPCEndpoint('eth_getFilterChanges'): apply_formatter_if(
is_array_of_dicts,
apply_formatter_to_array(log_key_remapper),
),
RPCEndpoint('eth_getFilterLogs'): apply_formatter_if(
is_array_of_dicts,
apply_formatter_to_array(log_key_remapper),
),
RPCEndpoint('evm_snapshot'): integer_to_hex,
},
)
def guess_from(web3: "Web3", _: TxParams) -> ChecksumAddress:
coinbase = web3.eth.coinbase
if coinbase is not None:
return coinbase
try:
return web3.eth.accounts[0]
except KeyError:
pass
return None
@curry
def fill_default(
field: str, guess_func: Callable[..., Any], web3: "Web3", transaction: TxParams
) -> TxParams:
if field in transaction and transaction[field] is not None:
return transaction
else:
guess_val = guess_func(web3, transaction)
return assoc(transaction, field, guess_val)
def default_transaction_fields_middleware(
make_request: Callable[[RPCEndpoint, Any], Any], web3: "Web3"
) -> Callable[[RPCEndpoint, Any], RPCResponse]:
fill_default_from = fill_default('from', guess_from, web3)
def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
if method in (
'eth_call',
'eth_estimateGas',
'eth_sendTransaction',
):
filled_transaction = pipe(
params[0],
fill_default_from,
)
return make_request(method, [filled_transaction] + list(params)[1:])
else:
return make_request(method, params)
return middleware
| true | true |
f72fc26e54686c0677dd432b4718786ee33861af | 188 | py | Python | toal/annotators/WebAnnotators.py | Bhaskers-Blu-Org1/text-oriented-active-learning | facfb40673a59e43391b7bdb508e612dff1988d9 | [
"MIT"
] | 4 | 2020-10-23T14:42:30.000Z | 2021-06-10T13:29:04.000Z | toal/annotators/WebAnnotators.py | Bhaskers-Blu-Org1/text-oriented-active-learning | facfb40673a59e43391b7bdb508e612dff1988d9 | [
"MIT"
] | null | null | null | toal/annotators/WebAnnotators.py | Bhaskers-Blu-Org1/text-oriented-active-learning | facfb40673a59e43391b7bdb508e612dff1988d9 | [
"MIT"
] | 1 | 2020-07-30T10:35:09.000Z | 2020-07-30T10:35:09.000Z | from .AbstractAnnotator import AbstractAnnotator
class WebAnnotator(AbstractAnnotator):
    """Annotator stub (web-based, per the class name); not implemented yet."""

    def annotate(self, unlab_index, unlabeled_x, unlabeled_y):
        """Request labels for the given unlabeled pool entries.

        Always raises NotImplementedError in this stub.
        """
        raise NotImplementedError()
class WebAnnotator(AbstractAnnotator):
def annotate(self, unlab_index, unlabeled_x, unlabeled_y):
raise NotImplementedError() | true | true |
f72fc3c32c354207da9306ce6997164be7d90d1b | 9,544 | py | Python | venv/lib/python3.8/site-packages/awscli/customizations/eks/kubeconfig.py | sr9dc/DS_Systems_Project_2 | 0b348c1dd300756f732b4ce13e04239036dc601a | [
"MIT"
] | 4 | 2022-01-07T13:37:33.000Z | 2022-03-31T03:21:17.000Z | venv/lib/python3.8/site-packages/awscli/customizations/eks/kubeconfig.py | sr9dc/DS_Systems_Project_2 | 0b348c1dd300756f732b4ce13e04239036dc601a | [
"MIT"
] | 1 | 2022-01-27T04:21:58.000Z | 2022-01-27T04:21:58.000Z | venv/lib/python3.8/site-packages/awscli/customizations/eks/kubeconfig.py | sr9dc/DS_Systems_Project_2 | 0b348c1dd300756f732b4ce13e04239036dc601a | [
"MIT"
] | null | null | null | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import yaml
import logging
import errno
from botocore.compat import OrderedDict
from awscli.customizations.eks.exceptions import EKSError
from awscli.customizations.eks.ordered_yaml import (ordered_yaml_load,
ordered_yaml_dump)
class KubeconfigError(EKSError):
    """ Base class for all kubeconfig-related errors."""
class KubeconfigCorruptedError(KubeconfigError):
    """ Raised when a kubeconfig's content cannot be parsed or is invalid."""
class KubeconfigInaccessableError(KubeconfigError):
    # NOTE(review): the name misspells "Inaccessible", but it is public
    # API and must stay as-is for backward compatibility.
    """ Raised when a kubeconfig file cannot be opened for read/writing."""
def _get_new_kubeconfig_content():
return OrderedDict([
("apiVersion", "v1"),
("clusters", []),
("contexts", []),
("current-context", ""),
("kind", "Config"),
("preferences", OrderedDict()),
("users", [])
])
class Kubeconfig(object):
def __init__(self, path, content=None):
self.path = path
if content is None:
content = _get_new_kubeconfig_content()
self.content = content
def dump_content(self):
""" Return the stored content in yaml format. """
return ordered_yaml_dump(self.content)
def has_cluster(self, name):
"""
Return true if this kubeconfig contains an entry
For the passed cluster name.
"""
if 'clusters' not in self.content:
return False
return name in [cluster['name']
for cluster in self.content['clusters']]
class KubeconfigValidator(object):
def __init__(self):
# Validation_content is an empty Kubeconfig
# It is used as a way to know what types different entries should be
self._validation_content = Kubeconfig(None, None).content
def validate_config(self, config):
"""
Raises KubeconfigCorruptedError if the passed content is invalid
:param config: The config to validate
:type config: Kubeconfig
"""
if not isinstance(config, Kubeconfig):
raise KubeconfigCorruptedError("Internal error: "
"Not a Kubeconfig object.")
self._validate_config_types(config)
self._validate_list_entry_types(config)
def _validate_config_types(self, config):
"""
Raises KubeconfigCorruptedError if any of the entries in config
are the wrong type
:param config: The config to validate
:type config: Kubeconfig
"""
if not isinstance(config.content, dict):
raise KubeconfigCorruptedError("Content not a dictionary.")
for key, value in self._validation_content.items():
if (key in config.content and
config.content[key] is not None and
not isinstance(config.content[key], type(value))):
raise KubeconfigCorruptedError(
"{0} is wrong type:{1} "
"(Should be {2})".format(
key,
type(config.content[key]),
type(value)
)
)
def _validate_list_entry_types(self, config):
"""
Raises KubeconfigCorruptedError if any lists in config contain objects
which are not dictionaries
:param config: The config to validate
:type config: Kubeconfig
"""
for key, value in self._validation_content.items():
if (key in config.content and
type(config.content[key]) == list):
for element in config.content[key]:
if not isinstance(element, OrderedDict):
raise KubeconfigCorruptedError(
"Entry in {0} not a dictionary.".format(key))
class KubeconfigLoader(object):
def __init__(self, validator=None):
if validator is None:
validator = KubeconfigValidator()
self._validator = validator
def load_kubeconfig(self, path):
"""
Loads the kubeconfig found at the given path.
If no file is found at the given path,
Generate a new kubeconfig to write back.
If the kubeconfig is valid, loads the content from it.
If the kubeconfig is invalid, throw the relevant exception.
:param path: The path to load a kubeconfig from
:type path: string
:raises KubeconfigInaccessableError: if the kubeconfig can't be opened
:raises KubeconfigCorruptedError: if the kubeconfig is invalid
:return: The loaded kubeconfig
:rtype: Kubeconfig
"""
try:
with open(path, "r") as stream:
loaded_content = ordered_yaml_load(stream)
except IOError as e:
if e.errno == errno.ENOENT:
loaded_content = None
else:
raise KubeconfigInaccessableError(
"Can't open kubeconfig for reading: {0}".format(e))
except yaml.YAMLError as e:
raise KubeconfigCorruptedError(
"YamlError while loading kubeconfig: {0}".format(e))
loaded_config = Kubeconfig(path, loaded_content)
self._validator.validate_config(loaded_config)
return loaded_config
class KubeconfigWriter(object):
def write_kubeconfig(self, config):
"""
Write config to disk.
OK if the file doesn't exist.
:param config: The kubeconfig to write
:type config: Kubeconfig
:raises KubeconfigInaccessableError: if the kubeconfig
can't be opened for writing
"""
directory = os.path.dirname(config.path)
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise KubeconfigInaccessableError(
"Can't create directory for writing: {0}".format(e))
try:
with os.fdopen(
os.open(
config.path,
os.O_CREAT | os.O_RDWR | os.O_TRUNC,
0o600),
"w+") as stream:
ordered_yaml_dump(config.content, stream)
except (IOError, OSError) as e:
raise KubeconfigInaccessableError(
"Can't open kubeconfig for writing: {0}".format(e))
class KubeconfigAppender(object):
def insert_entry(self, config, key, entry):
"""
Insert entry into the array at content[key]
Overwrite an existing entry if they share the same name
:param config: The kubeconfig to insert an entry into
:type config: Kubeconfig
"""
if key not in config.content:
config.content[key] = []
array = config.content[key]
if not isinstance(array, list):
raise KubeconfigError("Tried to insert into {0},"
"which is a {1} "
"not a {2}".format(key,
type(array),
list))
found = False
for counter, existing_entry in enumerate(array):
if "name" in existing_entry and\
"name" in entry and\
existing_entry["name"] == entry["name"]:
array[counter] = entry
found = True
if not found:
array.append(entry)
config.content[key] = array
return config
def _make_context(self, cluster, user, alias=None):
""" Generate a context to associate cluster and user with a given alias."""
return OrderedDict([
("context", OrderedDict([
("cluster", cluster["name"]),
("user", user["name"])
])),
("name", alias or user["name"])
])
def insert_cluster_user_pair(self, config, cluster, user, alias=None):
"""
Insert the passed cluster entry and user entry,
then make a context to associate them
and set current-context to be the new context.
Returns the new context
:param config: the Kubeconfig to insert the pair into
:type config: Kubeconfig
:param cluster: the cluster entry
:type cluster: OrderedDict
:param user: the user entry
:type user: OrderedDict
:param alias: the alias for the context; defaults top user entry name
:type context: str
:return: The generated context
:rtype: OrderedDict
"""
context = self._make_context(cluster, user, alias=alias)
self.insert_entry(config, "clusters", cluster)
self.insert_entry(config, "users", user)
self.insert_entry(config, "contexts", context)
config.content["current-context"] = context["name"]
return context
| 34.454874 | 83 | 0.585813 |
import os
import yaml
import logging
import errno
from botocore.compat import OrderedDict
from awscli.customizations.eks.exceptions import EKSError
from awscli.customizations.eks.ordered_yaml import (ordered_yaml_load,
ordered_yaml_dump)
class KubeconfigError(EKSError):
class KubeconfigCorruptedError(KubeconfigError):
class KubeconfigInaccessableError(KubeconfigError):
def _get_new_kubeconfig_content():
return OrderedDict([
("apiVersion", "v1"),
("clusters", []),
("contexts", []),
("current-context", ""),
("kind", "Config"),
("preferences", OrderedDict()),
("users", [])
])
class Kubeconfig(object):
def __init__(self, path, content=None):
self.path = path
if content is None:
content = _get_new_kubeconfig_content()
self.content = content
def dump_content(self):
return ordered_yaml_dump(self.content)
def has_cluster(self, name):
if 'clusters' not in self.content:
return False
return name in [cluster['name']
for cluster in self.content['clusters']]
class KubeconfigValidator(object):
def __init__(self):
self._validation_content = Kubeconfig(None, None).content
def validate_config(self, config):
if not isinstance(config, Kubeconfig):
raise KubeconfigCorruptedError("Internal error: "
"Not a Kubeconfig object.")
self._validate_config_types(config)
self._validate_list_entry_types(config)
def _validate_config_types(self, config):
if not isinstance(config.content, dict):
raise KubeconfigCorruptedError("Content not a dictionary.")
for key, value in self._validation_content.items():
if (key in config.content and
config.content[key] is not None and
not isinstance(config.content[key], type(value))):
raise KubeconfigCorruptedError(
"{0} is wrong type:{1} "
"(Should be {2})".format(
key,
type(config.content[key]),
type(value)
)
)
def _validate_list_entry_types(self, config):
for key, value in self._validation_content.items():
if (key in config.content and
type(config.content[key]) == list):
for element in config.content[key]:
if not isinstance(element, OrderedDict):
raise KubeconfigCorruptedError(
"Entry in {0} not a dictionary.".format(key))
class KubeconfigLoader(object):
def __init__(self, validator=None):
if validator is None:
validator = KubeconfigValidator()
self._validator = validator
def load_kubeconfig(self, path):
try:
with open(path, "r") as stream:
loaded_content = ordered_yaml_load(stream)
except IOError as e:
if e.errno == errno.ENOENT:
loaded_content = None
else:
raise KubeconfigInaccessableError(
"Can't open kubeconfig for reading: {0}".format(e))
except yaml.YAMLError as e:
raise KubeconfigCorruptedError(
"YamlError while loading kubeconfig: {0}".format(e))
loaded_config = Kubeconfig(path, loaded_content)
self._validator.validate_config(loaded_config)
return loaded_config
class KubeconfigWriter(object):
def write_kubeconfig(self, config):
directory = os.path.dirname(config.path)
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise KubeconfigInaccessableError(
"Can't create directory for writing: {0}".format(e))
try:
with os.fdopen(
os.open(
config.path,
os.O_CREAT | os.O_RDWR | os.O_TRUNC,
0o600),
"w+") as stream:
ordered_yaml_dump(config.content, stream)
except (IOError, OSError) as e:
raise KubeconfigInaccessableError(
"Can't open kubeconfig for writing: {0}".format(e))
class KubeconfigAppender(object):
def insert_entry(self, config, key, entry):
if key not in config.content:
config.content[key] = []
array = config.content[key]
if not isinstance(array, list):
raise KubeconfigError("Tried to insert into {0},"
"which is a {1} "
"not a {2}".format(key,
type(array),
list))
found = False
for counter, existing_entry in enumerate(array):
if "name" in existing_entry and\
"name" in entry and\
existing_entry["name"] == entry["name"]:
array[counter] = entry
found = True
if not found:
array.append(entry)
config.content[key] = array
return config
def _make_context(self, cluster, user, alias=None):
return OrderedDict([
("context", OrderedDict([
("cluster", cluster["name"]),
("user", user["name"])
])),
("name", alias or user["name"])
])
def insert_cluster_user_pair(self, config, cluster, user, alias=None):
context = self._make_context(cluster, user, alias=alias)
self.insert_entry(config, "clusters", cluster)
self.insert_entry(config, "users", user)
self.insert_entry(config, "contexts", context)
config.content["current-context"] = context["name"]
return context
| true | true |
f72fc4014791e9cb00ad25357fa03b020d005be5 | 8,006 | py | Python | mxdc/devices/cryojet.py | michel4j/mxdc | 844f0854cc696553c8a51f8e9b5b06a8e4345261 | [
"BSD-3-Clause"
] | 2 | 2018-10-23T19:05:40.000Z | 2021-03-18T20:06:32.000Z | mxdc/devices/cryojet.py | michel4j/mxdc | 844f0854cc696553c8a51f8e9b5b06a8e4345261 | [
"BSD-3-Clause"
] | null | null | null | mxdc/devices/cryojet.py | michel4j/mxdc | 844f0854cc696553c8a51f8e9b5b06a8e4345261 | [
"BSD-3-Clause"
] | null | null | null | from enum import Enum
from gi.repository import GLib
from zope.interface import implementer
import mxdc.devices.shutter
from mxdc import Device, Signal, Property
from mxdc.devices import misc
from mxdc.utils.log import get_module_logger
from .interfaces import ICryostat
logger = get_module_logger(__name__)
class CryoJetNozzle(mxdc.devices.shutter.EPICSShutter):
"""
A specialized in-out actuator for pneumatic Cryojet nozzles.
:param name: The process variable name of the devices
"""
def __init__(self, name):
open_name = "%s:opr:open" % name
close_name = "%s:opr:close" % name
state_name = "%s:out" % name
mxdc.devices.shutter.EPICSShutter.__init__(self, open_name, close_name, state_name)
self._messages = ['Restoring', 'Retracting']
self._name = 'Cryojet Nozzle'
@implementer(ICryostat)
class CryostatBase(Device):
"""
Base class for all cryostat devices. A cryostat maintains low temperatures at the sample position.
Signals:
- temp (float,): Sample temperature
- level (float,): Cryogen level
- sample (float,): Cryogen flow-rate
- shield (float,): Shield flow-rate
"""
class Positions(Enum):
IN, OUT = range(2)
class Signals:
temp = Signal('temp', arg_types=(float,))
level = Signal('level', arg_types=(float,))
sample = Signal('sample', arg_types=(float,))
shield = Signal('shield', arg_types=(float,))
pos = Signal('position', arg_types=(object,))
# Properties
temperature = Property(type=float, default=0.0)
shield = Property(type=float, default=0.0)
sample = Property(type=float, default=0.0)
level = Property(type=float, default=0.0)
def configure(self, temp=None, sample=None, shield=None, position=None):
"""
Configure the Cryostat.
:param temp: Set the target sample temperature
:param sample: Set the sample flow rate
:param shield: Set the shield flow rate
:param position: If the cryostat set the position. Should be one of Positions.IN, Positions.OUT
"""
def stop(self):
"""
Stop the cryostat
"""
def start(self):
"""
Start the cryostat
"""
@implementer(ICryostat)
class CryoJetBase(Device):
"""
Cryogenic Nozzle Jet Device
"""
temperature = Property(type=float, default=0.0)
shield = Property(type=float, default=0.0)
sample = Property(type=float, default=0.0)
level = Property(type=float, default=0.0)
def __init__(self, *args, **kwargs):
super().__init__()
self.name = 'Cryojet'
self._previous_flow = 7.0
self.setup(*args, **kwargs)
def setup(self, *args, **kwargs):
pass
def anneal(self, duration):
"""
Anneal for the specified duration
:param duration: duration in seconds to stop cooling
"""
pass
def on_temp(self, obj, val):
if val < 110:
self.set_state(health=(0, 'temp', ''))
elif val < 115:
self.set_state(health=(2, 'temp', 'Temp. high!'))
else:
self.set_state(health=(4, 'temp', 'Temp. too high!'))
self.set_property('temperature', val)
def on_sample(self, obj, val):
if val > 5:
self.set_state(health=(0, 'sample', ''))
elif val > 4:
self.set_state(health=(2, 'sample', 'Sample Flow Low!'))
else:
self.set_state(health=(4, 'sample','Sample Flow Too Low!'))
self.set_property('sample', val)
def on_shield(self, obj, val):
if val > 5:
self.set_state(health=(0, 'shield', ''))
elif val > 4:
self.set_state(health=(2, 'shield','Shield Flow Low!'))
else:
self.set_state(health=(4, 'shield','Shield Flow Too Low!'))
self.set_property('shield', val)
def on_level(self, obj, val):
if val < 15:
self.set_state(health=(4, 'cryo','Cryogen too low!'))
elif val < 20:
self.set_state(health=(2, 'cryo','Cryogen low!'))
else:
self.set_state(health=(0, 'cryo', ''))
self.set_property('level', val)
def on_nozzle(self, obj, val):
if val:
self.set_state(health=(1, 'nozzle', 'Retracted!'))
else:
self.set_state(health=(0, 'nozzle', 'Restored'))
class CryoJet(CryoJetBase):
def setup(self, name, level_name, nozzle_name):
self.temp_fbk = self.add_pv('{}:sensorTemp:get'.format(name))
self.sample_fbk = self.add_pv('{}:SampleFlow:get'.format(name))
self.shield_fbk = self.add_pv('{}:ShieldFlow:get'.format(name))
self.sample_sp = self.add_pv('{}:sampleFlow:set'.format(name))
self.level_fbk = self.add_pv('{}:ch1LVL:get'.format(level_name))
self.fill_status = self.add_pv('{}:status:ch1:N.SVAL'.format(level_name))
self.nozzle = CryoJetNozzle(nozzle_name)
# connect signals for monitoring state
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.sample_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
def on_level(self, obj, val):
if val < 150:
self.set_state(health=(4, 'cryo','Cryogen too low!'))
elif val < 200:
self.set_state(health=(2, 'cryo','Cryogen low!'))
else:
self.set_state(health=(0, 'cryo', ''))
self.set_property('level', val/10.)
def anneal(self, duration):
previous_flow = self.sample_fbk.get()
self.sample_sp.put(0.0)
GLib.timeout_add(duration*1000, self.sample_sp.put, previous_flow)
class CryoJet5(CryoJetBase):
def setup(self, name, nozzle_name):
self.temp_fbk = self.add_pv('{}:sample:temp:fbk'.format(name))
self.sample_fbk = self.add_pv('{}:sample:flow:fbk'.format(name))
self.shield_fbk = self.add_pv('{}:shield:flow:fbk'.format(name))
self.sample_sp = self.add_pv('{}:sample:flow'.format(name))
self.level_fbk = self.add_pv('{}:autofill:level:fbk'.format(name))
self.fill_status = self.add_pv('{}:autofill:state'.format(name))
self.nozzle = CryoJetNozzle(nozzle_name)
# connect signals for monitoring state
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
class SimCryoJet(CryoJetBase):
def setup(self, *args, **kwargs):
self.nozzle = mxdc.devices.shutter.SimShutter('Sim Cryo Nozzle')
self.temp_fbk = misc.SimPositioner('Cryo Temperature', pos=102.5, noise=3)
self.sample_fbk = misc.SimPositioner('Cryo Sample flow', pos=6.5, noise=1)
self.shield_fbk = misc.SimPositioner('Cryo Shield flow', pos=9.5, noise=1)
self.level_fbk = misc.SimPositioner('Cryo Level', pos=35.5, noise=10)
self.name = 'Sim CryoJet'
# connect signals for monitoring state
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
def _simulate_nozzle(self, *args, **kwargs):
if self.nozzle.is_open():
self.nozzle.close()
else:
self.nozzle.open()
return True
def anneal(self, duration):
previous_flow = self.sample_fbk.get()
self.sample_sp.put(0.0)
GLib.timeout_add(duration*1000, self.sample_fbk.put, previous_flow)
__all__ = ['CryoJet', 'CryoJet5', 'SimCryoJet']
| 33.923729 | 103 | 0.62066 | from enum import Enum
from gi.repository import GLib
from zope.interface import implementer
import mxdc.devices.shutter
from mxdc import Device, Signal, Property
from mxdc.devices import misc
from mxdc.utils.log import get_module_logger
from .interfaces import ICryostat
logger = get_module_logger(__name__)
class CryoJetNozzle(mxdc.devices.shutter.EPICSShutter):
def __init__(self, name):
open_name = "%s:opr:open" % name
close_name = "%s:opr:close" % name
state_name = "%s:out" % name
mxdc.devices.shutter.EPICSShutter.__init__(self, open_name, close_name, state_name)
self._messages = ['Restoring', 'Retracting']
self._name = 'Cryojet Nozzle'
@implementer(ICryostat)
class CryostatBase(Device):
class Positions(Enum):
IN, OUT = range(2)
class Signals:
temp = Signal('temp', arg_types=(float,))
level = Signal('level', arg_types=(float,))
sample = Signal('sample', arg_types=(float,))
shield = Signal('shield', arg_types=(float,))
pos = Signal('position', arg_types=(object,))
temperature = Property(type=float, default=0.0)
shield = Property(type=float, default=0.0)
sample = Property(type=float, default=0.0)
level = Property(type=float, default=0.0)
def configure(self, temp=None, sample=None, shield=None, position=None):
def stop(self):
def start(self):
@implementer(ICryostat)
class CryoJetBase(Device):
temperature = Property(type=float, default=0.0)
shield = Property(type=float, default=0.0)
sample = Property(type=float, default=0.0)
level = Property(type=float, default=0.0)
def __init__(self, *args, **kwargs):
super().__init__()
self.name = 'Cryojet'
self._previous_flow = 7.0
self.setup(*args, **kwargs)
def setup(self, *args, **kwargs):
pass
def anneal(self, duration):
pass
def on_temp(self, obj, val):
if val < 110:
self.set_state(health=(0, 'temp', ''))
elif val < 115:
self.set_state(health=(2, 'temp', 'Temp. high!'))
else:
self.set_state(health=(4, 'temp', 'Temp. too high!'))
self.set_property('temperature', val)
def on_sample(self, obj, val):
if val > 5:
self.set_state(health=(0, 'sample', ''))
elif val > 4:
self.set_state(health=(2, 'sample', 'Sample Flow Low!'))
else:
self.set_state(health=(4, 'sample','Sample Flow Too Low!'))
self.set_property('sample', val)
def on_shield(self, obj, val):
if val > 5:
self.set_state(health=(0, 'shield', ''))
elif val > 4:
self.set_state(health=(2, 'shield','Shield Flow Low!'))
else:
self.set_state(health=(4, 'shield','Shield Flow Too Low!'))
self.set_property('shield', val)
def on_level(self, obj, val):
if val < 15:
self.set_state(health=(4, 'cryo','Cryogen too low!'))
elif val < 20:
self.set_state(health=(2, 'cryo','Cryogen low!'))
else:
self.set_state(health=(0, 'cryo', ''))
self.set_property('level', val)
def on_nozzle(self, obj, val):
if val:
self.set_state(health=(1, 'nozzle', 'Retracted!'))
else:
self.set_state(health=(0, 'nozzle', 'Restored'))
class CryoJet(CryoJetBase):
def setup(self, name, level_name, nozzle_name):
self.temp_fbk = self.add_pv('{}:sensorTemp:get'.format(name))
self.sample_fbk = self.add_pv('{}:SampleFlow:get'.format(name))
self.shield_fbk = self.add_pv('{}:ShieldFlow:get'.format(name))
self.sample_sp = self.add_pv('{}:sampleFlow:set'.format(name))
self.level_fbk = self.add_pv('{}:ch1LVL:get'.format(level_name))
self.fill_status = self.add_pv('{}:status:ch1:N.SVAL'.format(level_name))
self.nozzle = CryoJetNozzle(nozzle_name)
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.sample_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
def on_level(self, obj, val):
if val < 150:
self.set_state(health=(4, 'cryo','Cryogen too low!'))
elif val < 200:
self.set_state(health=(2, 'cryo','Cryogen low!'))
else:
self.set_state(health=(0, 'cryo', ''))
self.set_property('level', val/10.)
def anneal(self, duration):
previous_flow = self.sample_fbk.get()
self.sample_sp.put(0.0)
GLib.timeout_add(duration*1000, self.sample_sp.put, previous_flow)
class CryoJet5(CryoJetBase):
def setup(self, name, nozzle_name):
self.temp_fbk = self.add_pv('{}:sample:temp:fbk'.format(name))
self.sample_fbk = self.add_pv('{}:sample:flow:fbk'.format(name))
self.shield_fbk = self.add_pv('{}:shield:flow:fbk'.format(name))
self.sample_sp = self.add_pv('{}:sample:flow'.format(name))
self.level_fbk = self.add_pv('{}:autofill:level:fbk'.format(name))
self.fill_status = self.add_pv('{}:autofill:state'.format(name))
self.nozzle = CryoJetNozzle(nozzle_name)
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
class SimCryoJet(CryoJetBase):
def setup(self, *args, **kwargs):
self.nozzle = mxdc.devices.shutter.SimShutter('Sim Cryo Nozzle')
self.temp_fbk = misc.SimPositioner('Cryo Temperature', pos=102.5, noise=3)
self.sample_fbk = misc.SimPositioner('Cryo Sample flow', pos=6.5, noise=1)
self.shield_fbk = misc.SimPositioner('Cryo Shield flow', pos=9.5, noise=1)
self.level_fbk = misc.SimPositioner('Cryo Level', pos=35.5, noise=10)
self.name = 'Sim CryoJet'
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
def _simulate_nozzle(self, *args, **kwargs):
if self.nozzle.is_open():
self.nozzle.close()
else:
self.nozzle.open()
return True
def anneal(self, duration):
previous_flow = self.sample_fbk.get()
self.sample_sp.put(0.0)
GLib.timeout_add(duration*1000, self.sample_fbk.put, previous_flow)
__all__ = ['CryoJet', 'CryoJet5', 'SimCryoJet']
| true | true |
f72fc514b8852f9e17acbed322e8818424cb6190 | 59,548 | py | Python | test/azure/Expected/AcceptanceTests/Paging/paging/aio/operations/_paging_operations.py | amrElroumy/autorest.python | b37af1779f6d53b4fa0d92da62151f8133006f98 | [
"MIT"
] | null | null | null | test/azure/Expected/AcceptanceTests/Paging/paging/aio/operations/_paging_operations.py | amrElroumy/autorest.python | b37af1779f6d53b4fa0d92da62151f8133006f98 | [
"MIT"
] | null | null | null | test/azure/Expected/AcceptanceTests/Paging/paging/aio/operations/_paging_operations.py | amrElroumy/autorest.python | b37af1779f6d53b4fa0d92da62151f8133006f98 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PagingOperations:
"""PagingOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~paging.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_no_item_name_pages(self, **kwargs) -> AsyncIterable["_models.ProductResultValue"]:
"""A paging operation that must return result of the default 'value' node.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResultValue or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResultValue]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResultValue"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_no_item_name_pages.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResultValue", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_no_item_name_pages.metadata = {"url": "/paging/noitemname"} # type: ignore
@distributed_trace
def get_null_next_link_name_pages(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that must ignore any kind of nextLink, and stop after page 1.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_null_next_link_name_pages.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_null_next_link_name_pages.metadata = {"url": "/paging/nullnextlink"} # type: ignore
@distributed_trace
def get_single_pages(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that finishes on the first call without a nextlink.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_single_pages.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_single_pages.metadata = {"url": "/paging/single"} # type: ignore
@distributed_trace
def first_response_empty(self, **kwargs) -> AsyncIterable["_models.ProductResultValue"]:
"""A paging operation whose first response's items list is empty, but still returns a next link.
Second (and final) call, will give you an items list of 1.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResultValue or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResultValue]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResultValue"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.first_response_empty.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResultValue", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
first_response_empty.metadata = {"url": "/paging/firstResponseEmpty/1"} # type: ignore
    @distributed_trace
    def get_multiple_pages(
        self,
        client_request_id: Optional[str] = None,
        paging_get_multiple_pages_options: Optional["_models.PagingGetMultiplePagesOptions"] = None,
        **kwargs
    ) -> AsyncIterable["_models.ProductResult"]:
        """A paging operation that includes a nextLink that has 10 pages.
        :param client_request_id:
        :type client_request_id: str
        :param paging_get_multiple_pages_options: Parameter group.
        :type paging_get_multiple_pages_options: ~paging.models.PagingGetMultiplePagesOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProductResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Pop paging-specific kwargs up front so the remainder can be forwarded
        # verbatim to the pipeline on every page request.
        cls = kwargs.pop("cls", None)  # type: ClsType["_models.ProductResult"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the optional parameter group into the individual header values.
        _maxresults = None
        _timeout = None
        if paging_get_multiple_pages_options is not None:
            _maxresults = paging_get_multiple_pages_options.maxresults
            _timeout = paging_get_multiple_pages_options.timeout
        accept = "application/json"
        def prepare_request(next_link=None):
            # Headers (including the flattened group values) are sent on the
            # initial call and on every continuation call alike.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            if client_request_id is not None:
                header_parameters["client-request-id"] = self._serialize.header(
                    "client_request_id", client_request_id, "str"
                )
            if _maxresults is not None:
                header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
            if _timeout is not None:
                header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # Construct URL
                url = self.get_multiple_pages.metadata["url"]  # type: ignore
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: the nextLink is an absolute/relative URL used as-is.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Return (continuation token, iterable of items); a falsy next_link ends paging.
            deserialized = self._deserialize("ProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; only HTTP 200 is considered success.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages.metadata = {"url": "/paging/multiple"}  # type: ignore
    @distributed_trace
    def get_with_query_params(self, required_query_parameter: int, **kwargs) -> AsyncIterable["_models.ProductResult"]:
        """A paging operation that includes a next operation. It has a different query parameter from it's
        next operation nextOperationWithQueryParams. Returns a ProductResult.
        :param required_query_parameter: A required integer query parameter. Put in value '100' to pass
        test.
        :type required_query_parameter: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProductResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop("cls", None)  # type: ClsType["_models.ProductResult"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # queryConstant is a fixed client constant sent on both the initial and
        # the next operation.
        query_constant = True
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # Initial call: requiredQueryParameter is only sent here.
                # Construct URL
                url = self.get_with_query_params.metadata["url"]  # type: ignore
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters["requiredQueryParameter"] = self._serialize.query(
                    "required_query_parameter", required_query_parameter, "int"
                )
                query_parameters["queryConstant"] = self._serialize.query("query_constant", query_constant, "bool")
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: unlike most paging operations, the "next" target is a
                # distinct, hard-coded operation URL rather than the nextLink value itself,
                # and it carries only the constant query parameter.
                url = "/paging/multiple/nextOperationWithQueryParams"
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters["queryConstant"] = self._serialize.query("query_constant", query_constant, "bool")
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Return (continuation token, iterable of items); a falsy next_link ends paging.
            deserialized = self._deserialize("ProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; only HTTP 200 is considered success.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_with_query_params.metadata = {"url": "/paging/multiple/getWithQueryParams"}  # type: ignore
    @distributed_trace
    def get_odata_multiple_pages(
        self,
        client_request_id: Optional[str] = None,
        paging_get_odata_multiple_pages_options: Optional["_models.PagingGetOdataMultiplePagesOptions"] = None,
        **kwargs
    ) -> AsyncIterable["_models.OdataProductResult"]:
        """A paging operation that includes a nextLink in odata format that has 10 pages.
        :param client_request_id:
        :type client_request_id: str
        :param paging_get_odata_multiple_pages_options: Parameter group.
        :type paging_get_odata_multiple_pages_options: ~paging.models.PagingGetOdataMultiplePagesOptions
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OdataProductResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.OdataProductResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Pop paging-specific kwargs so the remainder is forwarded to the pipeline.
        cls = kwargs.pop("cls", None)  # type: ClsType["_models.OdataProductResult"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the optional parameter group into the individual header values.
        _maxresults = None
        _timeout = None
        if paging_get_odata_multiple_pages_options is not None:
            _maxresults = paging_get_odata_multiple_pages_options.maxresults
            _timeout = paging_get_odata_multiple_pages_options.timeout
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            if client_request_id is not None:
                header_parameters["client-request-id"] = self._serialize.header(
                    "client_request_id", client_request_id, "str"
                )
            if _maxresults is not None:
                header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
            if _timeout is not None:
                header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # Construct URL
                url = self.get_odata_multiple_pages.metadata["url"]  # type: ignore
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: the odata.nextLink value is used as the URL as-is.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Odata responses carry the continuation token in odata_next_link
            # (vs. next_link on the plain ProductResult).
            deserialized = self._deserialize("OdataProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; only HTTP 200 is considered success.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_odata_multiple_pages.metadata = {"url": "/paging/multiple/odata"}  # type: ignore
    @distributed_trace
    def get_multiple_pages_with_offset(
        self,
        paging_get_multiple_pages_with_offset_options: "_models.PagingGetMultiplePagesWithOffsetOptions",
        client_request_id: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.ProductResult"]:
        """A paging operation that includes a nextLink that has 10 pages.
        :param paging_get_multiple_pages_with_offset_options: Parameter group.
        :type paging_get_multiple_pages_with_offset_options: ~paging.models.PagingGetMultiplePagesWithOffsetOptions
        :param client_request_id:
        :type client_request_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProductResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop("cls", None)  # type: ClsType["_models.ProductResult"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the parameter group: maxresults/timeout become headers,
        # offset becomes a URL path segment.
        _maxresults = None
        _offset = None
        _timeout = None
        if paging_get_multiple_pages_with_offset_options is not None:
            _maxresults = paging_get_multiple_pages_with_offset_options.maxresults
            _offset = paging_get_multiple_pages_with_offset_options.offset
            _timeout = paging_get_multiple_pages_with_offset_options.timeout
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            if client_request_id is not None:
                header_parameters["client-request-id"] = self._serialize.header(
                    "client_request_id", client_request_id, "str"
                )
            if _maxresults is not None:
                header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
            if _timeout is not None:
                header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # Initial call: substitute {offset} into the operation URL template.
                # Construct URL
                url = self.get_multiple_pages_with_offset.metadata["url"]  # type: ignore
                path_format_arguments = {
                    "offset": self._serialize.url("offset", _offset, "int"),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: the nextLink is already fully formed, no path formatting.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Return (continuation token, iterable of items); a falsy next_link ends paging.
            deserialized = self._deserialize("ProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; only HTTP 200 is considered success.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages_with_offset.metadata = {"url": "/paging/multiple/withpath/{offset}"}  # type: ignore
@distributed_trace
def get_multiple_pages_retry_first(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that fails on the first call with 500 and then retries and then get a
response including a nextLink that has 10 pages.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_multiple_pages_retry_first.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_retry_first.metadata = {"url": "/paging/multiple/retryfirst"} # type: ignore
@distributed_trace
def get_multiple_pages_retry_second(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that includes a nextLink that has 10 pages, of which the 2nd call fails
first with 500. The client should retry and finish all 10 pages eventually.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_multiple_pages_retry_second.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_retry_second.metadata = {"url": "/paging/multiple/retrysecond"} # type: ignore
@distributed_trace
def get_single_pages_failure(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that receives a 400 on the first call.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_single_pages_failure.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_single_pages_failure.metadata = {"url": "/paging/single/failure"} # type: ignore
@distributed_trace
def get_multiple_pages_failure(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that receives a 400 on the second call.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_multiple_pages_failure.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_failure.metadata = {"url": "/paging/multiple/failure"} # type: ignore
@distributed_trace
def get_multiple_pages_failure_uri(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
"""A paging operation that receives an invalid nextLink.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_multiple_pages_failure_uri.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_failure_uri.metadata = {"url": "/paging/multiple/failureuri"} # type: ignore
    @distributed_trace
    def get_multiple_pages_fragment_next_link(
        self, api_version: str, tenant: str, **kwargs
    ) -> AsyncIterable["_models.OdataProductResult"]:
        """A paging operation that doesn't return a full URL, just a fragment.
        :param api_version: Sets the api version to use.
        :type api_version: str
        :param tenant: Sets the tenant to use.
        :type tenant: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OdataProductResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.OdataProductResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop("cls", None)  # type: ClsType["_models.OdataProductResult"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # Construct URL
                url = self.get_multiple_pages_fragment_next_link.metadata["url"]  # type: ignore
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", tenant, "str"),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters["api_version"] = self._serialize.query("api_version", api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: the server returns only a URL *fragment*, which is
                # spliced into a fixed path template. skip_quote=True keeps any
                # slashes in the fragment from being percent-encoded.
                url = "/paging/multiple/fragment/{tenant}/{nextLink}"
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", tenant, "str"),
                    "nextLink": self._serialize.url("next_link", next_link, "str", skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters["api_version"] = self._serialize.query("api_version", api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Odata responses carry the continuation fragment in odata_next_link.
            deserialized = self._deserialize("OdataProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; only HTTP 200 is considered success.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages_fragment_next_link.metadata = {"url": "/paging/multiple/fragment/{tenant}"}  # type: ignore
    @distributed_trace
    def get_multiple_pages_fragment_with_grouping_next_link(
        self, custom_parameter_group: "_models.CustomParameterGroup", **kwargs
    ) -> AsyncIterable["_models.OdataProductResult"]:
        """A paging operation that doesn't return a full URL, just a fragment with parameters grouped.
        :param custom_parameter_group: Parameter group.
        :type custom_parameter_group: ~paging.models.CustomParameterGroup
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OdataProductResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.OdataProductResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop("cls", None)  # type: ClsType["_models.OdataProductResult"]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the parameter group into the individual values used below.
        _api_version = None
        _tenant = None
        if custom_parameter_group is not None:
            _api_version = custom_parameter_group.api_version
            _tenant = custom_parameter_group.tenant
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # Construct URL
                url = self.get_multiple_pages_fragment_with_grouping_next_link.metadata["url"]  # type: ignore
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", _tenant, "str"),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters["api_version"] = self._serialize.query("api_version", _api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation: the server returns only a URL *fragment*, spliced
                # into a fixed path template; skip_quote=True keeps slashes in the
                # fragment from being percent-encoded.
                url = "/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}"
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", _tenant, "str"),
                    "nextLink": self._serialize.url("next_link", next_link, "str", skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters["api_version"] = self._serialize.query("api_version", _api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Odata responses carry the continuation fragment in odata_next_link.
            deserialized = self._deserialize("OdataProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; only HTTP 200 is considered success.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages_fragment_with_grouping_next_link.metadata = {"url": "/paging/multiple/fragmentwithgrouping/{tenant}"}  # type: ignore
async def _get_multiple_pages_lro_initial(
self,
client_request_id: Optional[str] = None,
paging_get_multiple_pages_lro_options: Optional["_models.PagingGetMultiplePagesLroOptions"] = None,
**kwargs
) -> "_models.ProductResult":
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
_maxresults = None
_timeout = None
if paging_get_multiple_pages_lro_options is not None:
_maxresults = paging_get_multiple_pages_lro_options.maxresults
_timeout = paging_get_multiple_pages_lro_options.timeout
accept = "application/json"
# Construct URL
url = self._get_multiple_pages_lro_initial.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if client_request_id is not None:
header_parameters["client-request-id"] = self._serialize.header(
"client_request_id", client_request_id, "str"
)
if _maxresults is not None:
header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
if _timeout is not None:
header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
deserialized = self._deserialize("ProductResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_multiple_pages_lro_initial.metadata = {"url": "/paging/multiple/lro"} # type: ignore
@distributed_trace_async
async def begin_get_multiple_pages_lro(
self,
client_request_id: Optional[str] = None,
paging_get_multiple_pages_lro_options: Optional["_models.PagingGetMultiplePagesLroOptions"] = None,
**kwargs
) -> AsyncLROPoller[AsyncItemPaged["_models.ProductResult"]]:
"""A long-running paging operation that includes a nextLink that has 10 pages.
:param client_request_id:
:type client_request_id: str
:param paging_get_multiple_pages_lro_options: Parameter group.
:type paging_get_multiple_pages_lro_options: ~paging.models.PagingGetMultiplePagesLroOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns an iterator like instance of either ProductResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResult]]
:raises ~azure.core.exceptions.HttpResponseError:
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
_maxresults = None
_timeout = None
if paging_get_multiple_pages_lro_options is not None:
_maxresults = paging_get_multiple_pages_lro_options.maxresults
_timeout = paging_get_multiple_pages_lro_options.timeout
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if client_request_id is not None:
header_parameters["client-request-id"] = self._serialize.header(
"client_request_id", client_request_id, "str"
)
if _maxresults is not None:
header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
if _timeout is not None:
header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_multiple_pages_lro.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
polling = kwargs.pop("polling", False) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResult"]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_multiple_pages_lro_initial(
client_request_id=client_request_id,
paging_get_multiple_pages_lro_options=paging_get_multiple_pages_lro_options,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop("error_map", None)
kwargs.pop("content_type", None)
def get_long_running_output(pipeline_response):
async def internal_get_next(next_link=None):
if next_link is None:
return pipeline_response
else:
return await get_next(next_link)
return AsyncItemPaged(internal_get_next, extract_data)
if polling is True:
polling_method = AsyncLROBasePolling(lro_delay, **kwargs)
elif polling is False:
polling_method = AsyncNoPolling()
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_multiple_pages_lro.metadata = {"url": "/paging/multiple/lro"} # type: ignore
@distributed_trace
def get_paging_model_with_item_name_with_xms_client_name(
self, **kwargs
) -> AsyncIterable["_models.ProductResultValueWithXMSClientName"]:
"""A paging operation that returns a paging model whose item name is is overriden by x-ms-client-
name 'indexes'.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ProductResultValueWithXMSClientName or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~paging.models.ProductResultValueWithXMSClientName]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.ProductResultValueWithXMSClientName"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
# Construct URL
url = self.get_paging_model_with_item_name_with_xms_client_name.metadata["url"] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResultValueWithXMSClientName", pipeline_response)
list_of_elem = deserialized.indexes
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_paging_model_with_item_name_with_xms_client_name.metadata = {"url": "/paging/itemNameWithXMSClientName"} # type: ignore
| 47.983884 | 140 | 0.653171 |
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.polling.async_base_polling import AsyncLROBasePolling
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PagingOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_no_item_name_pages(self, **kwargs) -> AsyncIterable["_models.ProductResultValue"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_no_item_name_pages.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResultValue", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_no_item_name_pages.metadata = {"url": "/paging/noitemname"}
@distributed_trace
def get_null_next_link_name_pages(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_null_next_link_name_pages.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_null_next_link_name_pages.metadata = {"url": "/paging/nullnextlink"}
@distributed_trace
def get_single_pages(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_single_pages.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_single_pages.metadata = {"url": "/paging/single"}
@distributed_trace
def first_response_empty(self, **kwargs) -> AsyncIterable["_models.ProductResultValue"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.first_response_empty.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResultValue", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
first_response_empty.metadata = {"url": "/paging/firstResponseEmpty/1"}
@distributed_trace
def get_multiple_pages(
self,
client_request_id: Optional[str] = None,
paging_get_multiple_pages_options: Optional["_models.PagingGetMultiplePagesOptions"] = None,
**kwargs
) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
_maxresults = None
_timeout = None
if paging_get_multiple_pages_options is not None:
_maxresults = paging_get_multiple_pages_options.maxresults
_timeout = paging_get_multiple_pages_options.timeout
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
if client_request_id is not None:
header_parameters["client-request-id"] = self._serialize.header(
"client_request_id", client_request_id, "str"
)
if _maxresults is not None:
header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
if _timeout is not None:
header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_multiple_pages.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages.metadata = {"url": "/paging/multiple"}
@distributed_trace
def get_with_query_params(self, required_query_parameter: int, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
query_constant = True
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_with_query_params.metadata["url"]
query_parameters = {}
query_parameters["requiredQueryParameter"] = self._serialize.query(
"required_query_parameter", required_query_parameter, "int"
)
query_parameters["queryConstant"] = self._serialize.query("query_constant", query_constant, "bool")
request = self._client.get(url, query_parameters, header_parameters)
else:
url = "/paging/multiple/nextOperationWithQueryParams"
query_parameters = {}
query_parameters["queryConstant"] = self._serialize.query("query_constant", query_constant, "bool")
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_with_query_params.metadata = {"url": "/paging/multiple/getWithQueryParams"}
@distributed_trace
def get_odata_multiple_pages(
self,
client_request_id: Optional[str] = None,
paging_get_odata_multiple_pages_options: Optional["_models.PagingGetOdataMultiplePagesOptions"] = None,
**kwargs
) -> AsyncIterable["_models.OdataProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
_maxresults = None
_timeout = None
if paging_get_odata_multiple_pages_options is not None:
_maxresults = paging_get_odata_multiple_pages_options.maxresults
_timeout = paging_get_odata_multiple_pages_options.timeout
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
if client_request_id is not None:
header_parameters["client-request-id"] = self._serialize.header(
"client_request_id", client_request_id, "str"
)
if _maxresults is not None:
header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
if _timeout is not None:
header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_odata_multiple_pages.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OdataProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_odata_multiple_pages.metadata = {"url": "/paging/multiple/odata"}
@distributed_trace
def get_multiple_pages_with_offset(
self,
paging_get_multiple_pages_with_offset_options: "_models.PagingGetMultiplePagesWithOffsetOptions",
client_request_id: Optional[str] = None,
**kwargs
) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
_maxresults = None
_offset = None
_timeout = None
if paging_get_multiple_pages_with_offset_options is not None:
_maxresults = paging_get_multiple_pages_with_offset_options.maxresults
_offset = paging_get_multiple_pages_with_offset_options.offset
_timeout = paging_get_multiple_pages_with_offset_options.timeout
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
if client_request_id is not None:
header_parameters["client-request-id"] = self._serialize.header(
"client_request_id", client_request_id, "str"
)
if _maxresults is not None:
header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
if _timeout is not None:
header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_multiple_pages_with_offset.metadata["url"]
path_format_arguments = {
"offset": self._serialize.url("offset", _offset, "int"),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_with_offset.metadata = {"url": "/paging/multiple/withpath/{offset}"}
@distributed_trace
def get_multiple_pages_retry_first(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_multiple_pages_retry_first.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_retry_first.metadata = {"url": "/paging/multiple/retryfirst"}
@distributed_trace
def get_multiple_pages_retry_second(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_multiple_pages_retry_second.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_retry_second.metadata = {"url": "/paging/multiple/retrysecond"}
@distributed_trace
def get_single_pages_failure(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_single_pages_failure.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_single_pages_failure.metadata = {"url": "/paging/single/failure"}
@distributed_trace
def get_multiple_pages_failure(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_multiple_pages_failure.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResult", pipeline_response)
list_of_elem = deserialized.values
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_multiple_pages_failure.metadata = {"url": "/paging/multiple/failure"}
    @distributed_trace
    def get_multiple_pages_failure_uri(self, **kwargs) -> AsyncIterable["_models.ProductResult"]:
        """Return an async pager over ProductResult pages whose next link is an invalid URI.

        Raises HttpResponseError (mapped through error_map) on any non-200 page response.
        """
        # Optional callback applied to each page's item list before yielding.
        cls = kwargs.pop("cls", None)
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page hits the operation URL; later pages follow the service-supplied link verbatim.
            header_parameters = {}
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                url = self.get_multiple_pages_failure_uri.metadata["url"]
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, async iterable of items).
            deserialized = self._deserialize("ProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page through the pipeline, mapping error status codes to exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages_failure_uri.metadata = {"url": "/paging/multiple/failureuri"}
    @distributed_trace
    def get_multiple_pages_fragment_next_link(
        self, api_version: str, tenant: str, **kwargs
    ) -> AsyncIterable["_models.OdataProductResult"]:
        """Return an async pager whose odata next link is only a URL *fragment*.

        The fragment is spliced into the tenant-scoped path template for every
        follow-up page. Raises HttpResponseError on non-200 page responses.
        """
        cls = kwargs.pop("cls", None)
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        accept = "application/json"
        def prepare_request(next_link=None):
            header_parameters = {}
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                url = self.get_multiple_pages_fragment_next_link.metadata["url"]
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", tenant, "str"),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters["api_version"] = self._serialize.query("api_version", api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a fragment, not an absolute URL: rebuild the full
                # path from the template (skip_quote keeps the fragment verbatim).
                url = "/paging/multiple/fragment/{tenant}/{nextLink}"
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", tenant, "str"),
                    "nextLink": self._serialize.url("next_link", next_link, "str", skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters["api_version"] = self._serialize.query("api_version", api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # One page -> (odata continuation token, async iterable of items).
            deserialized = self._deserialize("OdataProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages_fragment_next_link.metadata = {"url": "/paging/multiple/fragment/{tenant}"}
    @distributed_trace
    def get_multiple_pages_fragment_with_grouping_next_link(
        self, custom_parameter_group: "_models.CustomParameterGroup", **kwargs
    ) -> AsyncIterable["_models.OdataProductResult"]:
        """Return an async pager over fragment-style next links, with api_version and
        tenant supplied through a CustomParameterGroup.

        Raises HttpResponseError on non-200 page responses.
        """
        cls = kwargs.pop("cls", None)
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the grouped parameters before building any request.
        _api_version = None
        _tenant = None
        if custom_parameter_group is not None:
            _api_version = custom_parameter_group.api_version
            _tenant = custom_parameter_group.tenant
        accept = "application/json"
        def prepare_request(next_link=None):
            header_parameters = {}
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                url = self.get_multiple_pages_fragment_with_grouping_next_link.metadata["url"]
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", _tenant, "str"),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters["api_version"] = self._serialize.query("api_version", _api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is only a fragment; splice it into the tenant template
                # (skip_quote keeps the fragment verbatim).
                url = "/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}"
                path_format_arguments = {
                    "tenant": self._serialize.url("tenant", _tenant, "str"),
                    "nextLink": self._serialize.url("next_link", next_link, "str", skip_quote=True),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters["api_version"] = self._serialize.query("api_version", _api_version, "str")
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            deserialized = self._deserialize("OdataProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.odata_next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return AsyncItemPaged(get_next, extract_data)
    get_multiple_pages_fragment_with_grouping_next_link.metadata = {"url": "/paging/multiple/fragmentwithgrouping/{tenant}"}
    async def _get_multiple_pages_lro_initial(
        self,
        client_request_id: Optional[str] = None,
        paging_get_multiple_pages_lro_options: Optional["_models.PagingGetMultiplePagesLroOptions"] = None,
        **kwargs
    ) -> "_models.ProductResult":
        """Send the initial POST that starts the paging long-running operation.

        Expects HTTP 202 Accepted; returns the deserialized ProductResult, or the
        result of the optional ``cls`` callback. Raises HttpResponseError otherwise.
        """
        cls = kwargs.pop("cls", None)
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the optional options group into individual header values.
        _maxresults = None
        _timeout = None
        if paging_get_multiple_pages_lro_options is not None:
            _maxresults = paging_get_multiple_pages_lro_options.maxresults
            _timeout = paging_get_multiple_pages_lro_options.timeout
        accept = "application/json"
        url = self._get_multiple_pages_lro_initial.metadata["url"]
        query_parameters = {}
        header_parameters = {}
        if client_request_id is not None:
            header_parameters["client-request-id"] = self._serialize.header(
                "client_request_id", client_request_id, "str"
            )
        if _maxresults is not None:
            header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
        if _timeout is not None:
            header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 202 is the only accepted status for the LRO kick-off request.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        deserialized = self._deserialize("ProductResult", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _get_multiple_pages_lro_initial.metadata = {"url": "/paging/multiple/lro"}
    @distributed_trace_async
    async def begin_get_multiple_pages_lro(
        self,
        client_request_id: Optional[str] = None,
        paging_get_multiple_pages_lro_options: Optional["_models.PagingGetMultiplePagesLroOptions"] = None,
        **kwargs
    ) -> AsyncLROPoller[AsyncItemPaged["_models.ProductResult"]]:
        """Start the paging LRO and return a poller whose final result is an async pager.

        The initial POST is issued by _get_multiple_pages_lro_initial; the poller's
        deserialization callback wraps the first response in an AsyncItemPaged that
        follows next links via GET.
        """
        cls = kwargs.pop("cls", None)
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Flatten the optional options group into individual header values.
        _maxresults = None
        _timeout = None
        if paging_get_multiple_pages_lro_options is not None:
            _maxresults = paging_get_multiple_pages_lro_options.maxresults
            _timeout = paging_get_multiple_pages_lro_options.timeout
        accept = "application/json"
        def prepare_request(next_link=None):
            header_parameters = {}
            if client_request_id is not None:
                header_parameters["client-request-id"] = self._serialize.header(
                    "client_request_id", client_request_id, "str"
                )
            if _maxresults is not None:
                header_parameters["maxresults"] = self._serialize.header("maxresults", _maxresults, "int")
            if _timeout is not None:
                header_parameters["timeout"] = self._serialize.header("timeout", _timeout, "int")
            header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
            if not next_link:
                # NOTE(review): reads self.get_multiple_pages_lro.metadata, but only
                # begin_get_multiple_pages_lro.metadata is assigned below — confirm
                # that attribute exists elsewhere on this class.
                url = self.get_multiple_pages_lro.metadata["url"]
                query_parameters = {}
                request = self._client.post(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # One page -> (continuation token, async iterable of items).
            deserialized = self._deserialize("ProductResult", pipeline_response)
            list_of_elem = deserialized.values
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        # NOTE(review): polling defaults to False (no polling) here — confirm intended.
        polling = kwargs.pop("polling", False)
        # NOTE(review): 'cls' was already popped above, so this always rebinds cls to
        # None; extract_data (which closes over cls) therefore never sees a
        # caller-supplied cls — confirm intended.
        cls = kwargs.pop("cls", None)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)
        if cont_token is None:
            # cls=lambda keeps the raw PipelineResponse for the poller.
            raw_result = await self._get_multiple_pages_lro_initial(
                client_request_id=client_request_id,
                paging_get_multiple_pages_lro_options=paging_get_multiple_pages_lro_options,
                cls=lambda x, y, z: x,
                **kwargs
            )
        kwargs.pop("error_map", None)
        kwargs.pop("content_type", None)
        def get_long_running_output(pipeline_response):
            async def internal_get_next(next_link=None):
                # First call replays the already-fetched response; later calls page on.
                if next_link is None:
                    return pipeline_response
                else:
                    return await get_next(next_link)
            return AsyncItemPaged(internal_get_next, extract_data)
        if polling is True:
            polling_method = AsyncLROBasePolling(lro_delay, **kwargs)
        elif polling is False:
            polling_method = AsyncNoPolling()
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_multiple_pages_lro.metadata = {"url": "/paging/multiple/lro"}
@distributed_trace
def get_paging_model_with_item_name_with_xms_client_name(
self, **kwargs
) -> AsyncIterable["_models.ProductResultValueWithXMSClientName"]:
cls = kwargs.pop("cls", None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
if not next_link:
url = self.get_paging_model_with_item_name_with_xms_client_name.metadata["url"]
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ProductResultValueWithXMSClientName", pipeline_response)
list_of_elem = deserialized.indexes
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
get_paging_model_with_item_name_with_xms_client_name.metadata = {"url": "/paging/itemNameWithXMSClientName"}
| true | true |
f72fc59c0d562a760978fca715dfabbc90935136 | 791 | pyde | Python | mode/examples/Topics/Create Shapes/PolygonPShape/PolygonPShape.pyde | timgates42/processing.py | 78a237922c2a928b83f4ad579dbf8d32c0099890 | [
"Apache-2.0"
] | 1,224 | 2015-01-01T22:09:23.000Z | 2022-03-29T19:43:56.000Z | mode/examples/Topics/Create Shapes/PolygonPShape/PolygonPShape.pyde | timgates42/processing.py | 78a237922c2a928b83f4ad579dbf8d32c0099890 | [
"Apache-2.0"
] | 253 | 2015-01-14T03:45:51.000Z | 2022-02-08T01:18:19.000Z | mode/examples/Topics/Create Shapes/PolygonPShape/PolygonPShape.pyde | timgates42/processing.py | 78a237922c2a928b83f4ad579dbf8d32c0099890 | [
"Apache-2.0"
] | 225 | 2015-01-13T18:38:33.000Z | 2022-03-30T20:27:39.000Z |
"""
PolygonPShape.
Using a PShape to display a custom polygon.
"""
def setup():
    """Build the star polygon once at startup; draw() only redraws it."""
    size(640, 360, P2D)
    smooth()
    global star
    star = createShape()
    star.beginShape()
    # Fill and stroke are baked into the shape itself.
    star.fill(102)
    star.stroke(255)
    star.strokeWeight(2)
    # Hard-coded outline of a five-pointed star centered on (0, 0).
    outline = [
        (0, -50), (14, -20), (47, -15), (23, 7), (29, 40),
        (0, 25), (-29, 40), (-23, 7), (-47, -15), (-14, -20),
    ]
    for vx, vy in outline:
        star.vertex(vx, vy)
    star.endShape(CLOSE)
def draw():
    """Clear the frame and draw the pre-built star at the mouse position."""
    background(51)
    # We can use translate to move the PShape.
    translate(mouseX, mouseY)
    # Display the shape.
    shape(star)
| 19.775 | 51 | 0.610619 |
def setup():
    """Build the star PShape once at startup (comment-stripped duplicate of the sketch above)."""
    size(640, 360, P2D)
    smooth()
    global star
    star = createShape()
    star.beginShape()
    # Fill and stroke are baked into the shape itself.
    star.fill(102)
    star.stroke(255)
    star.strokeWeight(2)
    # Hard-coded vertices of a five-pointed star centered on (0, 0).
    star.vertex(0, -50)
    star.vertex(14, -20)
    star.vertex(47, -15)
    star.vertex(23, 7)
    star.vertex(29, 40)
    star.vertex(0, 25)
    star.vertex(-29, 40)
    star.vertex(-23, 7)
    star.vertex(-47, -15)
    star.vertex(-14, -20)
    star.endShape(CLOSE)
def draw():
    """Redraw the cached star at the current mouse position."""
    background(51)
    translate(mouseX, mouseY)
    shape(star)
| true | true |
f72fc60681e9156e8be7417e9e0e510cbc1a4913 | 7,388 | py | Python | faucet/faucet_pipeline.py | boldsort/faucet | 451fbaa8ebce1822e06615c9da947f1dc7e3e416 | [
"Apache-2.0"
] | 3 | 2021-04-07T19:10:12.000Z | 2021-12-30T17:11:14.000Z | faucet/faucet_pipeline.py | boldsort/faucet | 451fbaa8ebce1822e06615c9da947f1dc7e3e416 | [
"Apache-2.0"
] | 27 | 2019-03-22T03:44:20.000Z | 2020-01-19T16:53:55.000Z | faucet/faucet_pipeline.py | boldsort/faucet | 451fbaa8ebce1822e06615c9da947f1dc7e3e416 | [
"Apache-2.0"
] | 1 | 2019-10-25T22:51:42.000Z | 2019-10-25T22:51:42.000Z | """Standard FAUCET pipeline."""
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2019 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from faucet.faucet_metadata import EGRESS_METADATA_MASK
class ValveTableConfig: # pylint: disable=too-few-public-methods,too-many-instance-attributes
    """Configuration for a single table.

    Instances compare, order and hash by their canonical string form
    (the sorted set of truthy attributes), so two configs with the same
    effective settings are interchangeable.
    """
    def __init__(self, name, table_id, # pylint: disable=too-many-arguments
                 exact_match=None, meter=None, output=True, miss_goto=None,
                 size=None, match_types=None, set_fields=None, dec_ttl=None,
                 vlan_scale=None, vlan_port_scale=None,
                 next_tables=None, metadata_match=0, metadata_write=0):
        self.name = name
        self.table_id = table_id
        self.exact_match = exact_match
        self.meter = meter
        self.output = output
        self.miss_goto = miss_goto
        self.size = size
        self.match_types = match_types
        self.set_fields = set_fields
        self.dec_ttl = dec_ttl
        self.vlan_scale = vlan_scale
        self.vlan_port_scale = vlan_port_scale
        self.metadata_match = metadata_match
        self.metadata_write = metadata_write
        # next_tables must be an ordered sequence; normalize falsy to ().
        if next_tables:
            assert isinstance(next_tables, (list, tuple))
            self.next_tables = next_tables
        else:
            self.next_tables = ()
    def __str__(self):
        # Canonical form: sorted 'attr: value' pairs for truthy attributes only.
        field_strs = ' '.join([
            '%s: %s' % (key, val)
            for key, val in sorted(self.__dict__.items())
            if val])
        return 'table config %s' % field_strs
    def __repr__(self):
        return self.__str__()
    def __hash__(self):
        return hash(self.__str__())
    def __eq__(self, other):
        # Compare canonical strings directly rather than their hashes:
        # hash equality does not imply equality (collisions would make
        # distinct configs compare equal).
        return self.__str__() == other.__str__()
    def __lt__(self, other):
        # Order by canonical string for a deterministic ordering; ordering by
        # hash value is arbitrary and varies under string-hash randomization.
        return self.__str__() < other.__str__()
# Table-name chains reused as next_tables values in the configs below.
_NEXT_ETH = ('eth_dst_hairpin', 'eth_dst', 'flood')
# Same chain with the virtual-IP table prepended.
_NEXT_VIP = ('vip',) + _NEXT_ETH
def _fib_table(ipv, table_id):
    """Build the FIB table config shared by the IPv4 and IPv6 pipelines."""
    table_name = 'ipv%u_fib' % ipv
    dst_match = ('ipv%u_dst' % ipv, True)
    return ValveTableConfig(
        table_name,
        table_id,
        match_types=(('eth_type', False), dst_match, ('vlan_vid', False)),
        set_fields=('eth_dst', 'eth_src', 'vlan_vid'),
        dec_ttl=True,
        vlan_port_scale=3.1,
        next_tables=_NEXT_VIP)
# Default per-table configurations; table ids are chained so each table
# immediately follows its predecessor.
PORT_ACL_DEFAULT_CONFIG = ValveTableConfig(
    'port_acl',
    0,
    match_types=(('in_port', False),),
    next_tables=(('vlan',) + _NEXT_VIP)
)
VLAN_DEFAULT_CONFIG = ValveTableConfig(
    'vlan',
    PORT_ACL_DEFAULT_CONFIG.table_id + 1,
    match_types=(('eth_dst', True), ('eth_type', False),
                 ('in_port', False), ('vlan_vid', False)),
    set_fields=('vlan_vid',),
    vlan_port_scale=3,
    next_tables=('copro', 'vlan_acl', 'classification', 'eth_src')
)
# NOTE(review): copro and vlan_acl are both VLAN_DEFAULT_CONFIG.table_id + 1
# and so share the same table id -- confirm intentional.
COPRO_DEFAULT_CONFIG = ValveTableConfig(
    'copro',
    VLAN_DEFAULT_CONFIG.table_id + 1,
    match_types=(('in_port', False), ('eth_type', False), ('vlan_vid', False)),
    vlan_port_scale=1.5,
    miss_goto='eth_dst',
    next_tables=(('eth_dst',)),
)
VLAN_ACL_DEFAULT_CONFIG = ValveTableConfig(
    'vlan_acl',
    VLAN_DEFAULT_CONFIG.table_id + 1,
    next_tables=(('classification', 'eth_src') + _NEXT_ETH))
CLASSIFICATION_DEFAULT_CONFIG = ValveTableConfig(
    'classification',
    VLAN_ACL_DEFAULT_CONFIG.table_id + 1,
    miss_goto='eth_src',
    next_tables=(('eth_src', 'ipv4_fib', 'ipv6_fib') + _NEXT_VIP)
)
ETH_SRC_DEFAULT_CONFIG = ValveTableConfig(
    'eth_src',
    CLASSIFICATION_DEFAULT_CONFIG.table_id + 1,
    miss_goto='eth_dst',
    next_tables=(('ipv4_fib', 'ipv6_fib') + _NEXT_VIP),
    match_types=(('eth_dst', True), ('eth_src', False), ('eth_type', False),
                 ('in_port', False), ('vlan_vid', False)),
    set_fields=('vlan_vid', 'eth_dst'),
    vlan_port_scale=4.1,
)
# IPv4/IPv6 FIB tables share one template, built by _fib_table().
IPV4_FIB_DEFAULT_CONFIG = _fib_table(4, ETH_SRC_DEFAULT_CONFIG.table_id + 1)
IPV6_FIB_DEFAULT_CONFIG = _fib_table(6, IPV4_FIB_DEFAULT_CONFIG.table_id + 1)
VIP_DEFAULT_CONFIG = ValveTableConfig(
    'vip',
    IPV6_FIB_DEFAULT_CONFIG.table_id + 1,
    match_types=(('arp_tpa', False), ('eth_dst', False), ('eth_type', False),
                 ('icmpv6_type', False), ('ip_proto', False)),
    next_tables=_NEXT_ETH,
    vlan_scale=8,
)
ETH_DST_HAIRPIN_DEFAULT_CONFIG = ValveTableConfig(
    'eth_dst_hairpin',
    VIP_DEFAULT_CONFIG.table_id + 1,
    match_types=(('in_port', False), ('eth_dst', False), ('vlan_vid', False)),
    miss_goto='eth_dst',
    exact_match=True,
    vlan_port_scale=4.1,
)
ETH_DST_DEFAULT_CONFIG = ValveTableConfig(
    'eth_dst',
    ETH_DST_HAIRPIN_DEFAULT_CONFIG.table_id + 1,
    exact_match=True,
    miss_goto='flood', # Note: when using egress acls the miss goto will be
    # egress acl table
    match_types=(('eth_dst', False), ('vlan_vid', False)),
    next_tables=('egress', 'egress_acl'),
    vlan_port_scale=4.1,
    metadata_write=EGRESS_METADATA_MASK
)
EGRESS_ACL_DEFAULT_CONFIG = ValveTableConfig(
    'egress_acl',
    ETH_DST_DEFAULT_CONFIG.table_id + 1,
    next_tables=('egress',)
)
EGRESS_DEFAULT_CONFIG = ValveTableConfig(
    'egress',
    EGRESS_ACL_DEFAULT_CONFIG.table_id + 1,
    match_types=(('metadata', True), ('vlan_vid', False)),
    vlan_port_scale=1.5,
    next_tables=('flood',),
    miss_goto='flood',
    metadata_match=EGRESS_METADATA_MASK
)
FLOOD_DEFAULT_CONFIG = ValveTableConfig(
    'flood',
    EGRESS_DEFAULT_CONFIG.table_id + 1,
    match_types=(('eth_dst', True), ('in_port', False), ('vlan_vid', False)),
    vlan_port_scale=8.0,
)
# The smallest set of tables a pipeline must provide.
MINIMUM_FAUCET_PIPELINE_TABLES = {
    'vlan', 'eth_src', 'eth_dst', 'flood'}
# TODO: implement an eth_type table before VLAN. This would enable interception
# of control protocols and simplify matches in vlan/eth_src, enabling use of
# exact_match.
# Full ordered default pipeline.
FAUCET_PIPELINE = (
    PORT_ACL_DEFAULT_CONFIG,
    VLAN_DEFAULT_CONFIG,
    COPRO_DEFAULT_CONFIG,
    VLAN_ACL_DEFAULT_CONFIG,
    CLASSIFICATION_DEFAULT_CONFIG,
    ETH_SRC_DEFAULT_CONFIG,
    IPV4_FIB_DEFAULT_CONFIG,
    IPV6_FIB_DEFAULT_CONFIG,
    VIP_DEFAULT_CONFIG,
    ETH_DST_HAIRPIN_DEFAULT_CONFIG,
    ETH_DST_DEFAULT_CONFIG,
    EGRESS_ACL_DEFAULT_CONFIG,
    EGRESS_DEFAULT_CONFIG,
    FLOOD_DEFAULT_CONFIG,
)
# Lookup of default config by table name.
# NOTE(review): 'classification' appears in FAUCET_PIPELINE but has no entry
# here -- confirm intentional.
DEFAULT_CONFIGS = {
    'port_acl': PORT_ACL_DEFAULT_CONFIG,
    'vlan': VLAN_DEFAULT_CONFIG,
    'copro': COPRO_DEFAULT_CONFIG,
    'vlan_acl': VLAN_ACL_DEFAULT_CONFIG,
    'eth_src': ETH_SRC_DEFAULT_CONFIG,
    'ipv4_fib': IPV4_FIB_DEFAULT_CONFIG,
    'ipv6_fib': IPV6_FIB_DEFAULT_CONFIG,
    'vip': VIP_DEFAULT_CONFIG,
    'eth_dst_hairpin': ETH_DST_HAIRPIN_DEFAULT_CONFIG,
    'eth_dst': ETH_DST_DEFAULT_CONFIG,
    'egress_acl': EGRESS_ACL_DEFAULT_CONFIG,
    'egress': EGRESS_DEFAULT_CONFIG,
    'flood': FLOOD_DEFAULT_CONFIG,
}
| 34.362791 | 93 | 0.678668 |
from faucet.faucet_metadata import EGRESS_METADATA_MASK
class ValveTableConfig:
    """Configuration for a single table (comment-stripped duplicate of the class above)."""
    def __init__(self, name, table_id,
                 exact_match=None, meter=None, output=True, miss_goto=None,
                 size=None, match_types=None, set_fields=None, dec_ttl=None,
                 vlan_scale=None, vlan_port_scale=None,
                 next_tables=None, metadata_match=0, metadata_write=0):
        self.name = name
        self.table_id = table_id
        self.exact_match = exact_match
        self.meter = meter
        self.output = output
        self.miss_goto = miss_goto
        self.size = size
        self.match_types = match_types
        self.set_fields = set_fields
        self.dec_ttl = dec_ttl
        self.vlan_scale = vlan_scale
        self.vlan_port_scale = vlan_port_scale
        self.metadata_match = metadata_match
        self.metadata_write = metadata_write
        # next_tables must be an ordered sequence; falsy normalizes to ().
        if next_tables:
            assert isinstance(next_tables, (list, tuple))
            self.next_tables = next_tables
        else:
            self.next_tables = ()
    def __str__(self):
        # Canonical form: sorted 'attr: value' pairs for truthy attributes only.
        field_strs = ' '.join([
            '%s: %s' % (key, val)
            for key, val in sorted(self.__dict__.items())
            if val])
        return 'table config %s' % field_strs
    def __repr__(self):
        return self.__str__()
    def __hash__(self):
        return hash(self.__str__())
    def __eq__(self, other):
        # NOTE(review): equality by hash value -- a hash collision would make
        # distinct configs compare equal; consider comparing the strings.
        return self.__hash__() == other.__hash__()
    def __lt__(self, other):
        # NOTE(review): ordering by hash value is arbitrary and varies under
        # string-hash randomization.
        return self.__hash__() < other.__hash__()
# --- Comment-stripped duplicate of the pipeline constants above. ---
_NEXT_ETH = ('eth_dst_hairpin', 'eth_dst', 'flood')
_NEXT_VIP = ('vip',) + _NEXT_ETH
def _fib_table(ipv, table_id):
    """Build the FIB table configuration for IP version *ipv*."""
    return ValveTableConfig(
        'ipv%u_fib' % ipv,
        table_id,
        match_types=(('eth_type', False), ('ipv%u_dst' % ipv, True), ('vlan_vid', False)),
        set_fields=('eth_dst', 'eth_src', 'vlan_vid'),
        dec_ttl=True,
        vlan_port_scale=3.1,
        next_tables=_NEXT_VIP
    )
# Table ids are chained: each table follows its predecessor.
PORT_ACL_DEFAULT_CONFIG = ValveTableConfig(
    'port_acl',
    0,
    match_types=(('in_port', False),),
    next_tables=(('vlan',) + _NEXT_VIP)
)
VLAN_DEFAULT_CONFIG = ValveTableConfig(
    'vlan',
    PORT_ACL_DEFAULT_CONFIG.table_id + 1,
    match_types=(('eth_dst', True), ('eth_type', False),
                 ('in_port', False), ('vlan_vid', False)),
    set_fields=('vlan_vid',),
    vlan_port_scale=3,
    next_tables=('copro', 'vlan_acl', 'classification', 'eth_src')
)
COPRO_DEFAULT_CONFIG = ValveTableConfig(
    'copro',
    VLAN_DEFAULT_CONFIG.table_id + 1,
    match_types=(('in_port', False), ('eth_type', False), ('vlan_vid', False)),
    vlan_port_scale=1.5,
    miss_goto='eth_dst',
    next_tables=(('eth_dst',)),
)
VLAN_ACL_DEFAULT_CONFIG = ValveTableConfig(
    'vlan_acl',
    VLAN_DEFAULT_CONFIG.table_id + 1,
    next_tables=(('classification', 'eth_src') + _NEXT_ETH))
CLASSIFICATION_DEFAULT_CONFIG = ValveTableConfig(
    'classification',
    VLAN_ACL_DEFAULT_CONFIG.table_id + 1,
    miss_goto='eth_src',
    next_tables=(('eth_src', 'ipv4_fib', 'ipv6_fib') + _NEXT_VIP)
)
ETH_SRC_DEFAULT_CONFIG = ValveTableConfig(
    'eth_src',
    CLASSIFICATION_DEFAULT_CONFIG.table_id + 1,
    miss_goto='eth_dst',
    next_tables=(('ipv4_fib', 'ipv6_fib') + _NEXT_VIP),
    match_types=(('eth_dst', True), ('eth_src', False), ('eth_type', False),
                 ('in_port', False), ('vlan_vid', False)),
    set_fields=('vlan_vid', 'eth_dst'),
    vlan_port_scale=4.1,
)
IPV4_FIB_DEFAULT_CONFIG = _fib_table(4, ETH_SRC_DEFAULT_CONFIG.table_id + 1)
IPV6_FIB_DEFAULT_CONFIG = _fib_table(6, IPV4_FIB_DEFAULT_CONFIG.table_id + 1)
VIP_DEFAULT_CONFIG = ValveTableConfig(
    'vip',
    IPV6_FIB_DEFAULT_CONFIG.table_id + 1,
    match_types=(('arp_tpa', False), ('eth_dst', False), ('eth_type', False),
                 ('icmpv6_type', False), ('ip_proto', False)),
    next_tables=_NEXT_ETH,
    vlan_scale=8,
)
ETH_DST_HAIRPIN_DEFAULT_CONFIG = ValveTableConfig(
    'eth_dst_hairpin',
    VIP_DEFAULT_CONFIG.table_id + 1,
    match_types=(('in_port', False), ('eth_dst', False), ('vlan_vid', False)),
    miss_goto='eth_dst',
    exact_match=True,
    vlan_port_scale=4.1,
)
ETH_DST_DEFAULT_CONFIG = ValveTableConfig(
    'eth_dst',
    ETH_DST_HAIRPIN_DEFAULT_CONFIG.table_id + 1,
    exact_match=True,
    miss_goto='flood',
    match_types=(('eth_dst', False), ('vlan_vid', False)),
    next_tables=('egress', 'egress_acl'),
    vlan_port_scale=4.1,
    metadata_write=EGRESS_METADATA_MASK
)
EGRESS_ACL_DEFAULT_CONFIG = ValveTableConfig(
    'egress_acl',
    ETH_DST_DEFAULT_CONFIG.table_id + 1,
    next_tables=('egress',)
)
EGRESS_DEFAULT_CONFIG = ValveTableConfig(
    'egress',
    EGRESS_ACL_DEFAULT_CONFIG.table_id + 1,
    match_types=(('metadata', True), ('vlan_vid', False)),
    vlan_port_scale=1.5,
    next_tables=('flood',),
    miss_goto='flood',
    metadata_match=EGRESS_METADATA_MASK
)
FLOOD_DEFAULT_CONFIG = ValveTableConfig(
    'flood',
    EGRESS_DEFAULT_CONFIG.table_id + 1,
    match_types=(('eth_dst', True), ('in_port', False), ('vlan_vid', False)),
    vlan_port_scale=8.0,
)
# The smallest set of tables a pipeline must provide.
MINIMUM_FAUCET_PIPELINE_TABLES = {
    'vlan', 'eth_src', 'eth_dst', 'flood'}
# Full ordered default pipeline.
FAUCET_PIPELINE = (
    PORT_ACL_DEFAULT_CONFIG,
    VLAN_DEFAULT_CONFIG,
    COPRO_DEFAULT_CONFIG,
    VLAN_ACL_DEFAULT_CONFIG,
    CLASSIFICATION_DEFAULT_CONFIG,
    ETH_SRC_DEFAULT_CONFIG,
    IPV4_FIB_DEFAULT_CONFIG,
    IPV6_FIB_DEFAULT_CONFIG,
    VIP_DEFAULT_CONFIG,
    ETH_DST_HAIRPIN_DEFAULT_CONFIG,
    ETH_DST_DEFAULT_CONFIG,
    EGRESS_ACL_DEFAULT_CONFIG,
    EGRESS_DEFAULT_CONFIG,
    FLOOD_DEFAULT_CONFIG,
)
# Lookup of default config by table name.
DEFAULT_CONFIGS = {
    'port_acl': PORT_ACL_DEFAULT_CONFIG,
    'vlan': VLAN_DEFAULT_CONFIG,
    'copro': COPRO_DEFAULT_CONFIG,
    'vlan_acl': VLAN_ACL_DEFAULT_CONFIG,
    'eth_src': ETH_SRC_DEFAULT_CONFIG,
    'ipv4_fib': IPV4_FIB_DEFAULT_CONFIG,
    'ipv6_fib': IPV6_FIB_DEFAULT_CONFIG,
    'vip': VIP_DEFAULT_CONFIG,
    'eth_dst_hairpin': ETH_DST_HAIRPIN_DEFAULT_CONFIG,
    'eth_dst': ETH_DST_DEFAULT_CONFIG,
    'egress_acl': EGRESS_ACL_DEFAULT_CONFIG,
    'egress': EGRESS_DEFAULT_CONFIG,
    'flood': FLOOD_DEFAULT_CONFIG,
}
| true | true |
f72fc60aecfcd841a19625037bc8f38fa6921303 | 30,717 | py | Python | models/tests/test_dataio.py | endymecy/NDIToolbox | f7a0a642b4a778d9d0c131871f4bfb9822ecb3da | [
"BSD-4-Clause"
] | 5 | 2017-02-28T16:16:06.000Z | 2020-07-13T06:49:34.000Z | models/tests/test_dataio.py | endymecy/NDIToolbox | f7a0a642b4a778d9d0c131871f4bfb9822ecb3da | [
"BSD-4-Clause"
] | 1 | 2018-08-19T19:08:14.000Z | 2018-08-19T19:08:14.000Z | models/tests/test_dataio.py | endymecy/NDIToolbox | f7a0a642b4a778d9d0c131871f4bfb9822ecb3da | [
"BSD-4-Clause"
] | 4 | 2017-10-25T20:17:15.000Z | 2021-07-26T11:39:50.000Z | """test_dataio.py - tests the dataio module
Chris R. Coughlin (TRI/Austin, Inc.)
"""
__author__ = 'Chris R. Coughlin'
import unittest
from models import dataio
from controllers import pathfinder
from utils.skiptest import skipIfModuleNotInstalled
import h5py
import numpy as np
import numpy.testing
import scipy.misc
import os
import random
class TestDataIO(unittest.TestCase):
"""Tests Data IO functions"""
    def setUp(self):
        """Write a small HDF5 file of random sample data used by the read tests."""
        self.sample_data = np.array(self.random_data())
        self.sample_data_basename = "sample.dat"
        self.sample_data_file = os.path.join(os.path.dirname(__file__),
                                             self.sample_data_basename)
        # The dataset inside the HDF5 file is keyed by the basename.
        with h5py.File(self.sample_data_file, 'w') as fidout:
            fidout.create_dataset(self.sample_data_basename, data=self.sample_data)
def random_data(self):
"""Returns a list of random data"""
return [random.uniform(-100, 100) for i in range(25)]
    def test_save_data(self):
        """Verify save_data function saves NumPy array to disk"""
        sample_filename = "test_savedata.dat"
        sample_path = os.path.join(os.path.dirname(__file__), sample_filename)
        dataio.save_data(sample_path, self.sample_data)
        # save_data is expected to append an .hdf5 extension to the given path.
        self.assertTrue(os.path.exists(sample_path + ".hdf5"))
        with h5py.File(sample_path + ".hdf5", "r") as fidin:
            froot, ext = os.path.splitext(os.path.basename(sample_filename))
            # NOTE(review): if no dataset key starts with froot, no assertion
            # runs inside the loop and the test passes vacuously.
            for key in fidin.keys():
                if key.startswith(froot):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(self.sample_data, read_data))
        # Best-effort cleanup (not in a finally block).
        if os.path.exists(sample_path + ".hdf5"):
            os.remove(sample_path + ".hdf5")
def test_get_data(self):
"""Verify get_data function returns a NumPy array"""
read_data = dataio.get_data(self.sample_data_file)
self.assertTrue(np.array_equal(self.sample_data, read_data))
def test_get_data_slice(self):
"""Verify get_data function returns a slice if specified"""
slice_idx = np.s_[5:15]
read_hyperslab = dataio.get_data(self.sample_data_file, slice_idx)
self.assertTrue(np.array_equal(self.sample_data[slice_idx], read_hyperslab))
def test_get_txt_data(self):
"""Verify retrieval of ASCII delimited data"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
'1.25 from hole Single Column.asc')
assert(os.path.exists(sample_data_file))
import_params = {'delimiter': None}
expected_data = np.loadtxt(sample_data_file, delimiter=import_params['delimiter'])
retrieved_data = dataio.get_txt_data(sample_data_file, **import_params)
self.assertTrue(np.array_equal(expected_data, retrieved_data))
def test_import_txt(self):
"""Verify import of ASCII delimited data files"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
'1.25 from hole Single Column.asc')
assert(os.path.exists(sample_data_file))
import_params = {'delimiter': None}
expected_data = np.loadtxt(sample_data_file, delimiter=import_params['delimiter'])
dataio.import_txt(sample_data_file, **import_params)
dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(sample_data_file) + ".hdf5")
self.assertTrue(os.path.exists(dest_file))
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
self.assertTrue(np.array_equal(expected_data, read_data))
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError: # file in use
pass
def test_export_txt(self):
"""Verify export of data to delimited ASCII"""
# Use integer data to avoid the floating point conversion to/from files
sample_data = self.sample_data.astype(np.int64)
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
'sample.hdf5')
dest_file = os.path.join(os.path.dirname(__file__), 'support_files',
'sample.txt')
with h5py.File(sample_data_file, "w") as fidout:
fidout.create_dataset(os.path.basename(sample_data_file), data=sample_data)
export_params = {'delimiter': ','}
dataio.export_txt(dest_file, sample_data_file, **export_params)
retrieved_data = np.genfromtxt(dest_file, delimiter=export_params['delimiter'])
self.assertTrue(np.array_equal(sample_data, retrieved_data))
try:
if os.path.exists(sample_data_file):
os.remove(sample_data_file)
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError: # file in use
pass
def test_export3D_txt(self):
"""Verify export of 3D data to delimited ASCII"""
x_size = 5
y_size = 4
z_size = 6
sample_data = np.empty((y_size, x_size, z_size))
for xidx in range(x_size):
for yidx in range(y_size):
for zidx in range(z_size):
sample_data[yidx, xidx, zidx] = int(random.uniform(-100, 100))
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample3d.hdf5')
dest_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample3d.txt')
with h5py.File(sample_data_file, "w") as fidout:
fidout.create_dataset(os.path.basename(sample_data_file), data=sample_data)
export_params = {'delimiter': ','}
dataio.export_txt(dest_file, sample_data_file, **export_params)
retrieved_data = np.empty(sample_data.shape)
with open(dest_file, "rb") as fidin:
zidx = 0
for line in fidin:
if not line.startswith('#'):
x, y, z = line.split(export_params['delimiter'])
x = int(x)
y = int(y)
z = float(z.strip())
retrieved_data[y, x, zidx] = z
zidx += 1
if zidx > sample_data.shape[2]-1:
zidx = 0
self.assertTrue(np.array_equal(sample_data, retrieved_data))
try:
if os.path.exists(sample_data_file):
os.remove(sample_data_file)
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError: # file in use
pass
@skipIfModuleNotInstalled("dicom")
def test_get_dicom_data(self):
"""Verify retrieval of DICOM / DICONDE data"""
import dicom
diconde_folder = os.path.join(os.path.dirname(__file__), 'support_files')
for root, dirs, files in os.walk(diconde_folder):
for fname in files:
dicom_data_file = os.path.join(root, fname)
basename, ext = os.path.splitext(dicom_data_file)
# Simple check to ensure we're looking at DICOM files
if ext.lower() == '.dcm':
dicom_data = dicom.read_file(dicom_data_file)
dicom_arr = dicom_data.pixel_array
retrieved_data = dataio.get_dicom_data(dicom_data_file)
self.assertTrue(np.array_equal(dicom_arr, retrieved_data))
    @skipIfModuleNotInstalled("dicom")
    def test_import_dicom(self):
        """Verify import of DICOM / DICONDE data"""
        # Load the ASTM DICONDE example files,
        # save, then ensure the resulting arrays
        # are identical
        import dicom
        diconde_folder = os.path.join(os.path.dirname(__file__), 'support_files')
        for root, dirs, files in os.walk(diconde_folder):
            for fname in files:
                dicom_data_file = os.path.join(root, fname)
                basename, ext = os.path.splitext(dicom_data_file)
                # Simple check to ensure we're looking at DICOM files
                if ext.lower() == '.dcm':
                    dicom_data = dicom.read_file(dicom_data_file)
                    dicom_arr = dicom_data.pixel_array
                    # import_dicom should create <filename>.hdf5 under the app's data folder
                    dataio.import_dicom(dicom_data_file)
                    dest_file = os.path.join(pathfinder.data_path(),
                                             os.path.basename(dicom_data_file) + ".hdf5")
                    self.assertTrue(os.path.exists(dest_file))
                    with h5py.File(dest_file, "r") as fidin:
                        # Compare every dataset whose key starts with the file's root name
                        froot, ext = os.path.splitext(os.path.basename(dest_file))
                        for key in fidin.keys():
                            if key.startswith(froot):
                                read_data = fidin[key][...]
                                self.assertTrue(np.array_equal(dicom_arr, read_data))
                    # Clean up the imported file; tolerate files held open on Windows
                    try:
                        if os.path.exists(dest_file):
                            os.remove(dest_file)
                    except WindowsError: # File in use
                        pass
def test_get_img_data(self):
"""Verify retrieval of bitmap data"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
'austin_sky320x240.jpg')
assert(os.path.exists(sample_data_file))
expected_data = scipy.misc.imread(sample_data_file, flatten=True)
retrieved_data = dataio.get_img_data(sample_data_file, flatten=True)
self.assertTrue(np.array_equal(expected_data, retrieved_data))
def test_import_img(self):
"""Verify import of images"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
'austin_sky320x240.jpg')
assert(os.path.exists(sample_data_file))
expected_data = scipy.misc.imread(sample_data_file, flatten=True)
dataio.import_img(sample_data_file, flatten=True)
dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(sample_data_file) + ".hdf5")
self.assertTrue(os.path.exists(dest_file))
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
self.assertTrue(np.array_equal(expected_data, read_data))
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError: # file in use
pass
def test_get_utwin_tof_data(self):
"""Verify retrieval of UTWin Time Of Flight data through convenience function"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
tof_resolution = 0.01
assert(os.path.exists(tof_data_file))
expected_tof_data = np.load(tof_data_file) * tof_resolution
returned_tof_data = dataio.get_utwin_tof_data(sample_data_file)[0]
numpy.testing.assert_array_almost_equal(expected_tof_data, returned_tof_data, decimal=3)
    def test_import_utwin_tof(self):
        """Verify import of UTWin Time Of Flight data through convenience function"""
        tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        # Raw fixture counts are scaled by the TOF resolution factor
        tof_resolution = 0.01
        expected_tof_data = np.load(tof_data_file) * tof_resolution
        # import_utwin_tof should create <root>_tofdata0.csc.hdf5 under the app's data folder
        root, ext = os.path.splitext(os.path.basename(sample_data_file))
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(root) + "_tofdata0.csc.hdf5")
        dataio.import_utwin_tof(sample_data_file)
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            # Compare every dataset whose key starts with the file's root name;
            # loose comparison (3 decimals) tolerates float round-trip differences
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    numpy.testing.assert_array_almost_equal(expected_tof_data, read_data, decimal=3)
        # Clean up the imported file; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError: # file in use
            pass
def test_get_utwin_amp_data(self):
"""Verify retrieval of UTWin amplitude data through convenience function"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
assert(os.path.exists(amp_data_file))
expected_tof_data = np.load(amp_data_file)
self.assertTrue(np.array_equal(expected_tof_data, dataio.get_utwin_amp_data(sample_data_file)[0]))
def test_import_utwin_amp(self):
"""Verify import of UTWin amplitude data through convenience function"""
amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
expected_amp_data = np.load(amp_data_file)
root, ext = os.path.splitext(os.path.basename(sample_data_file))
dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(root) + "_ampdata0.csc.hdf5")
dataio.import_utwin_amp(sample_data_file)
self.assertTrue(os.path.exists(dest_file))
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
self.assertTrue(np.array_equal(expected_amp_data, read_data))
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError: # file in use
pass
def test_get_utwin_data(self):
"""Verify returning UTWin data"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
sample_reader = dataio.UTWinCScanDataFile(sample_data_file)
sample_reader.read_data()
expected_data = sample_reader.data
returned_data = dataio.get_utwin_data(sample_data_file)
for datatype in expected_data:
self.assertTrue(np.array_equal(expected_data[datatype], returned_data[datatype]))
def test_get_winspect_data(self):
"""Verify retrieval of Winspect data through convenience function"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample_data.sdt')
assert(os.path.exists(sample_data_file))
scan_reader = dataio.WinspectReader(sample_data_file)
expected_data_list = scan_reader.get_winspect_data()
retrieved_data_list = dataio.get_winspect_data(sample_data_file)
self.assertEqual(len(expected_data_list), len(retrieved_data_list))
for data_array_idx in range(len(expected_data_list)):
self.assertTrue(np.array_equal(expected_data_list[data_array_idx].data, retrieved_data_list[data_array_idx].data))
def test_import_winspect(self):
"""Verify import of Winspect data through convenience function"""
sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample_data.sdt')
assert(os.path.exists(sample_data_file))
output_basename, ext = os.path.splitext(sample_data_file)
amp_dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
waveform_dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
dataio.import_winspect(sample_data_file)
expected_data_list = dataio.get_winspect_data(sample_data_file)
for dataset in expected_data_list:
if "amplitude" in dataset.data_type:
dest_file = amp_dest_file
elif "waveform" in dataset.data_type:
dest_file = waveform_dest_file
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
self.assertTrue(np.array_equal(dataset.data, read_data))
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError: # file in use
pass
def tearDown(self):
if os.path.exists(self.sample_data_file + ".hdf5"):
os.remove(self.sample_data_file + ".hdf5")
if os.path.exists(self.sample_data_file):
os.remove(self.sample_data_file)
class TestUTWinCScanReader(unittest.TestCase):
    """Tests the UTWinCScanReader class"""
    def setUp(self):
        """Point the reader at the known-good UTWin C-scan fixture"""
        self.sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        assert(os.path.exists(self.sample_data_file))
        self.cscan_reader = dataio.UTWinCscanReader()
    def test_basicfile_parameters(self):
        """Verify the basic parameters of the CSC file format are correct"""
        self.assertEqual(self.cscan_reader.header_string_length, 15)
        # Message IDs identify the record types stored inside a .csc file
        expected_message_ids = {'CSCAN_DATA': 2300,
                                'WAVEFORM_pre240': 2016,
                                'WAVEFORM_post240': 2303,
                                'UTSAVE_UTCD0': 2010,
                                'UTSAVE_UTCD1': 2011,
                                'UTSAVE_UTCD2': 2012,
                                'UTSAVE_UTCD4': 2014,
                                'UTSAVE_UTPro0': 253,
                                'PROJECT': 301,
                                'UTSAVE_UTHead': 100,
                                'UTSAVE_UTCScan0': 750,
                                'UTSAVE_UTCD10': 2020,
                                'UTSAVE_UTCScan3': 753}
        self.assertDictEqual(expected_message_ids, self.cscan_reader.message_ids)
    def test_is_cscanfile(self):
        """Verify reader correctly identifies CSC files"""
        self.assertTrue(self.cscan_reader.is_cscanfile(self.sample_data_file))
    def test_msg_info(self):
        """Verify reader correctly returns message ID and length"""
        with open(self.sample_data_file, "rb") as fidin:
            # Skip the fixed-length header string; the first message in the
            # fixture is known to be (ID 100, length 14)
            fidin.seek(self.cscan_reader.header_string_length)
            first_message = (100, 14)
            self.assertTupleEqual(first_message, self.cscan_reader.msg_info(fidin))
    def test_find_message(self):
        """Verify find_message returns the expected file positions"""
        # (message ID, expected byte offset in the fixture file)
        expected_file_positions = ((2014, 38037),
                                   (2011, 38059),
                                   (2010, 38003),
                                   (2012, 422075),
                                   (2010, 38003),
                                   (2010, 38003))
        for message_id, expected_pos in expected_file_positions:
            self.assertEqual(self.cscan_reader.find_message(self.sample_data_file, message_id), expected_pos)
    def test_find_blocks(self):
        """Verify find_blocks returns the file positions for the specified message ID"""
        # Search for UTSave_UTAD0 (Message ID 950) - contains A/D settings for each channel
        expected_filed_positions = [173, 920, 1667, 2414, 3161, 3908, 4655, 5402]
        self.assertListEqual(expected_filed_positions, self.cscan_reader.find_blocks(self.sample_data_file, 950))
    def test_read_field(self):
        """Verify read_field correctly parses the specified message block"""
        start_pos = self.cscan_reader.find_message(self.sample_data_file, 950)
        self.assertTrue(start_pos != -1)
        # First pass: read the raw float fields directly with np.fromfile
        with open(self.sample_data_file, "rb") as fidin:
            fidin.seek(start_pos)
            # Read a sample of A/D settings for the first channel
            expected_ad_delay = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
            expected_ad_width = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
            expected_ad_blanking_width = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
            expected_ad_gain = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
            expected_ad_offset = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
            expected_ad_trigger_level = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
            expected_ad_trigger_rate = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
        # Second pass: re-read the same fields through read_field and compare
        with open(self.sample_data_file, "rb") as fidin:
            fidin.seek(start_pos)
            ad_delay = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
            ad_width = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
            ad_blanking_width = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
            ad_gain = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
            ad_offset = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
            ad_trigger_level = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
            ad_trigger_rate = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
        self.assertAlmostEqual(expected_ad_delay, ad_delay)
        self.assertAlmostEqual(expected_ad_width, ad_width)
        self.assertAlmostEqual(expected_ad_blanking_width, ad_blanking_width)
        self.assertAlmostEqual(expected_ad_gain, ad_gain)
        self.assertAlmostEqual(expected_ad_offset, ad_offset)
        self.assertAlmostEqual(expected_ad_trigger_level, ad_trigger_level)
        self.assertAlmostEqual(expected_ad_trigger_rate, ad_trigger_rate)
class TestUTWinCScanDataFile(unittest.TestCase):
    """Tests the UTWinCScanDataFile class.
    Note: the sample UTWin data files available to TRI as of May 2013 are export-controlled and can't be
    distributed, which in turn limits the tests that can be performed. The UTWinCScanDataFile class has been
    tested against real inspection data, however without additional sample files you should consider the code
    experimental. For more details, contact TRI.
    """
    def setUp(self):
        """Wrap the UTWin C-scan fixture in a UTWinCScanDataFile"""
        self.sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        self.cscan_datafile = dataio.UTWinCScanDataFile(self.sample_data_file)
    def test_get_scan_version(self):
        """Verify get_scan_version returns the correct scan version"""
        # 117 is the known scan version of the checked-in fixture
        self.assertEqual(self.cscan_datafile.get_scan_version(), 117)
    def test_read_scan_properties(self):
        """Verify read_scan_properties correctly compiles required scan settings"""
        # Read a sample of the most important properties, verify read
        important_scan_properties = {'n_height':320,
                                     'n_width':600,
                                     'rf_length':2994,
                                     'channel_active':[1, 0, 0, 0, 0, 0, 0, 0]}
        for idx in important_scan_properties.keys():
            prop = important_scan_properties[idx]
            # Lists need assertListEqual; scalars use plain assertEqual
            if not isinstance(prop, list):
                self.assertEqual(prop, self.cscan_datafile.scan_properties[idx])
            else:
                self.assertListEqual(prop, self.cscan_datafile.scan_properties[idx])
    def test_read_tof_data(self):
        """Verify read_tof_data correctly reads Time Of Flight data"""
        # Verify one TOF dataset
        tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
        # Raw fixture counts are scaled by the TOF resolution factor
        tof_resolution = 0.01
        assert(os.path.exists(tof_data_file))
        expected_tof_data = np.load(tof_data_file) * tof_resolution
        self.cscan_datafile.read_tof_data()
        # Loose comparison (3 decimals) tolerates float round-trip differences
        numpy.testing.assert_array_almost_equal(expected_tof_data, self.cscan_datafile.data['tof'][0], decimal=3)
    def test_read_amplitude_data(self):
        """Verify read_amplitude_data correctly reads amplitude data"""
        amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
        assert(os.path.exists(amp_data_file))
        expected_amp_data = np.load(amp_data_file)
        self.cscan_datafile.read_amplitude_data()
        self.assertTrue(np.array_equal(expected_amp_data, self.cscan_datafile.data['amplitude'][0]))
    def test_import_tof(self):
        """Verify import of Time Of Flight data"""
        tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
        tof_resolution = 0.01
        csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData')
        assert(os.path.exists(tof_data_file))
        expected_tof_data = np.load(tof_data_file) * tof_resolution
        # import_tof_data should create <root>_tofdata0.csc.hdf5 under the app's data folder
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(csc_data_file) + "_tofdata0.csc.hdf5")
        self.cscan_datafile.import_tof_data()
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            # Compare every dataset whose key starts with the file's root name
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    numpy.testing.assert_array_almost_equal(expected_tof_data, read_data, decimal=3)
        # Clean up the imported file; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError: # file in use
            pass
    def test_import_amp(self):
        """Verify import of amplitude data"""
        amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
        csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData')
        assert(os.path.exists(amp_data_file))
        expected_amp_data = np.load(amp_data_file)
        # import_amplitude_data should create <root>_ampdata0.csc.hdf5 under the app's data folder
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(csc_data_file) + "_ampdata0.csc.hdf5")
        self.cscan_datafile.import_amplitude_data()
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            # Compare every dataset whose key starts with the file's root name
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(expected_amp_data, read_data))
        # Clean up the imported file; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError: # file in use
            pass
class TestWinspectReader(unittest.TestCase):
    """Tests the WinspectReader class."""
    def setUp(self):
        """Wrap the Winspect .sdt fixture in a WinspectReader"""
        self.sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                             'sample_data.sdt')
        assert(os.path.exists(self.sample_data_file))
        self.scan_reader = dataio.WinspectReader(self.sample_data_file)
    def test_find_numbers(self):
        """Verify find_numbers static method correctly pulls numbers from strings"""
        float_strings = {"0.000000 mm":0.0, "0.775995 Usec":0.775995}
        int_strings = {"35 18 0 22 3 112 ":[35, 18, 0, 22, 3, 112],
                       "Number of Sample Points : 3500":3500}
        bad_strings = {"Ramshackle":[], "":[]}
        for string in float_strings:
            self.assertAlmostEqual(float_strings[string], self.scan_reader.find_numbers(string))
        # Fix: the integer and no-number fixtures were built but never exercised
        for string in int_strings:
            self.assertEqual(int_strings[string], self.scan_reader.find_numbers(string))
        for string in bad_strings:
            self.assertEqual(bad_strings[string], self.scan_reader.find_numbers(string))
    def test_get_winspect_data(self):
        """Verify returning the list of arrays read from the data file"""
        # Reading through WinspectDataFile directly is the reference result
        data_reader = dataio.WinspectDataFile(self.sample_data_file)
        data_reader.read_data()
        expected_data_list = data_reader.datasets
        retrieved_data_list = self.scan_reader.get_winspect_data()
        self.assertEqual(len(expected_data_list), len(retrieved_data_list))
        for data_array_idx in range(len(expected_data_list)):
            self.assertTrue(np.array_equal(expected_data_list[data_array_idx].data, retrieved_data_list[data_array_idx].data))
    def test_import_winspect(self):
        """Verify importing datasets"""
        # Imports land under the app's data folder as <root>_ampdata0 / _waveformdata0 HDF5 files
        output_basename, ext = os.path.splitext(self.sample_data_file)
        amp_dest_file = os.path.join(pathfinder.data_path(),
                                     os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
        waveform_dest_file = os.path.join(pathfinder.data_path(),
                                          os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
        self.scan_reader.import_winspect()
        data_reader = dataio.WinspectDataFile(self.sample_data_file)
        data_reader.read_data()
        expected_data_list = data_reader.datasets
        for dataset in expected_data_list:
            if "amplitude" in dataset.data_type:
                dest_file = amp_dest_file
            elif "waveform" in dataset.data_type:
                dest_file = waveform_dest_file
            else:
                # Fix: previously an unrecognized data type fell through with
                # dest_file unbound (NameError) or stale from the prior iteration
                continue
            with h5py.File(dest_file, "r") as fidin:
                # Compare every dataset whose key starts with the file's root name
                root, ext = os.path.splitext(os.path.basename(dest_file))
                for key in fidin.keys():
                    if key.startswith(root):
                        read_data = fidin[key][...]
                        self.assertTrue(np.array_equal(dataset.data, read_data))
            # Clean up the imported file; tolerate files held open on Windows
            try:
                if os.path.exists(dest_file):
                    os.remove(dest_file)
            except WindowsError: # file in use
                pass
if __name__ == "__main__":
    # Seed the RNG (from system entropy/time) before tests build random fixtures
    random.seed()
    unittest.main()
__author__ = 'Chris R. Coughlin'
import unittest
from models import dataio
from controllers import pathfinder
from utils.skiptest import skipIfModuleNotInstalled
import h5py
import numpy as np
import numpy.testing
import scipy.misc
import os
import random
class TestDataIO(unittest.TestCase):
    """Tests the dataio module's save/load, import, and export helpers"""
    def setUp(self):
        """Create a small random HDF5 sample data file for each test"""
        self.sample_data = np.array(self.random_data())
        self.sample_data_basename = "sample.dat"
        self.sample_data_file = os.path.join(os.path.dirname(__file__),
                                             self.sample_data_basename)
        with h5py.File(self.sample_data_file, 'w') as fidout:
            fidout.create_dataset(self.sample_data_basename, data=self.sample_data)
    def random_data(self):
        """Return a list of 25 random floats in [-100, 100]"""
        return [random.uniform(-100, 100) for i in range(25)]
    def test_save_data(self):
        """Verify save_data stores data in an HDF5 file"""
        sample_filename = "test_savedata.dat"
        sample_path = os.path.join(os.path.dirname(__file__), sample_filename)
        dataio.save_data(sample_path, self.sample_data)
        self.assertTrue(os.path.exists(sample_path + ".hdf5"))
        with h5py.File(sample_path + ".hdf5", "r") as fidin:
            # Compare every dataset whose key starts with the file's root name
            froot, ext = os.path.splitext(os.path.basename(sample_filename))
            for key in fidin.keys():
                if key.startswith(froot):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(self.sample_data, read_data))
        if os.path.exists(sample_path + ".hdf5"):
            os.remove(sample_path + ".hdf5")
    def test_get_data(self):
        """Verify get_data returns the stored data"""
        read_data = dataio.get_data(self.sample_data_file)
        self.assertTrue(np.array_equal(self.sample_data, read_data))
    def test_get_data_slice(self):
        """Verify get_data returns a hyperslab when given a slice index"""
        slice_idx = np.s_[5:15]
        read_hyperslab = dataio.get_data(self.sample_data_file, slice_idx)
        self.assertTrue(np.array_equal(self.sample_data[slice_idx], read_hyperslab))
    def test_get_txt_data(self):
        """Verify retrieval of delimited ASCII data"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                        '1.25 from hole Single Column.asc')
        assert(os.path.exists(sample_data_file))
        import_params = {'delimiter': None}
        # np.loadtxt is the reference parser for the fixture
        expected_data = np.loadtxt(sample_data_file, delimiter=import_params['delimiter'])
        retrieved_data = dataio.get_txt_data(sample_data_file, **import_params)
        self.assertTrue(np.array_equal(expected_data, retrieved_data))
    def test_import_txt(self):
        """Verify import of delimited ASCII data"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                        '1.25 from hole Single Column.asc')
        assert(os.path.exists(sample_data_file))
        import_params = {'delimiter': None}
        expected_data = np.loadtxt(sample_data_file, delimiter=import_params['delimiter'])
        dataio.import_txt(sample_data_file, **import_params)
        # import_txt should create <filename>.hdf5 under the app's data folder
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(sample_data_file) + ".hdf5")
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(expected_data, read_data))
        # Clean up; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError:
            pass
    def test_export_txt(self):
        """Verify export of data to delimited ASCII"""
        # Integer data avoids float round-trip noise in the text file
        sample_data = self.sample_data.astype(np.int64)
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                        'sample.hdf5')
        dest_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                 'sample.txt')
        with h5py.File(sample_data_file, "w") as fidout:
            fidout.create_dataset(os.path.basename(sample_data_file), data=sample_data)
        export_params = {'delimiter': ','}
        dataio.export_txt(dest_file, sample_data_file, **export_params)
        retrieved_data = np.genfromtxt(dest_file, delimiter=export_params['delimiter'])
        self.assertTrue(np.array_equal(sample_data, retrieved_data))
        # Clean up; tolerate files held open on Windows
        try:
            if os.path.exists(sample_data_file):
                os.remove(sample_data_file)
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError:
            pass
    def test_export3D_txt(self):
        """Verify export of 3D data to delimited ASCII"""
        x_size = 5
        y_size = 4
        z_size = 6
        # Integer-valued samples avoid float round-trip noise in the text file
        sample_data = np.empty((y_size, x_size, z_size))
        for xidx in range(x_size):
            for yidx in range(y_size):
                for zidx in range(z_size):
                    sample_data[yidx, xidx, zidx] = int(random.uniform(-100, 100))
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample3d.hdf5')
        dest_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample3d.txt')
        with h5py.File(sample_data_file, "w") as fidout:
            fidout.create_dataset(os.path.basename(sample_data_file), data=sample_data)
        export_params = {'delimiter': ','}
        dataio.export_txt(dest_file, sample_data_file, **export_params)
        retrieved_data = np.empty(sample_data.shape)
        with open(dest_file, "rb") as fidin:
            # Each non-'#' line is an "x,y,z" triple; the z index advances per
            # data line and wraps after z_size lines
            zidx = 0
            for line in fidin:
                if not line.startswith('#'):
                    x, y, z = line.split(export_params['delimiter'])
                    x = int(x)
                    y = int(y)
                    z = float(z.strip())
                    retrieved_data[y, x, zidx] = z
                    zidx += 1
                    if zidx > sample_data.shape[2]-1:
                        zidx = 0
        self.assertTrue(np.array_equal(sample_data, retrieved_data))
        # Clean up; tolerate files held open on Windows
        try:
            if os.path.exists(sample_data_file):
                os.remove(sample_data_file)
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError:
            pass
    @skipIfModuleNotInstalled("dicom")
    def test_get_dicom_data(self):
        """Verify retrieval of DICOM / DICONDE data"""
        import dicom
        diconde_folder = os.path.join(os.path.dirname(__file__), 'support_files')
        for root, dirs, files in os.walk(diconde_folder):
            for fname in files:
                dicom_data_file = os.path.join(root, fname)
                basename, ext = os.path.splitext(dicom_data_file)
                # Only .dcm files are treated as DICOM/DICONDE
                if ext.lower() == '.dcm':
                    dicom_data = dicom.read_file(dicom_data_file)
                    dicom_arr = dicom_data.pixel_array
                    retrieved_data = dataio.get_dicom_data(dicom_data_file)
                    self.assertTrue(np.array_equal(dicom_arr, retrieved_data))
    @skipIfModuleNotInstalled("dicom")
    def test_import_dicom(self):
        """Verify import of DICOM / DICONDE data"""
        # Load the ASTM DICONDE example files,
        # save, then ensure the resulting arrays
        # are identical
        import dicom
        diconde_folder = os.path.join(os.path.dirname(__file__), 'support_files')
        for root, dirs, files in os.walk(diconde_folder):
            for fname in files:
                dicom_data_file = os.path.join(root, fname)
                basename, ext = os.path.splitext(dicom_data_file)
                # Simple check to ensure we're looking at DICOM files
                if ext.lower() == '.dcm':
                    dicom_data = dicom.read_file(dicom_data_file)
                    dicom_arr = dicom_data.pixel_array
                    dataio.import_dicom(dicom_data_file)
                    dest_file = os.path.join(pathfinder.data_path(),
                                             os.path.basename(dicom_data_file) + ".hdf5")
                    self.assertTrue(os.path.exists(dest_file))
                    with h5py.File(dest_file, "r") as fidin:
                        froot, ext = os.path.splitext(os.path.basename(dest_file))
                        for key in fidin.keys():
                            if key.startswith(froot):
                                read_data = fidin[key][...]
                                self.assertTrue(np.array_equal(dicom_arr, read_data))
                    # Clean up; tolerate files held open on Windows
                    try:
                        if os.path.exists(dest_file):
                            os.remove(dest_file)
                    except WindowsError:
                        pass
    def test_get_img_data(self):
        """Verify retrieval of bitmap data"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                        'austin_sky320x240.jpg')
        assert(os.path.exists(sample_data_file))
        # scipy's flattened (grayscale) read is the reference result
        expected_data = scipy.misc.imread(sample_data_file, flatten=True)
        retrieved_data = dataio.get_img_data(sample_data_file, flatten=True)
        self.assertTrue(np.array_equal(expected_data, retrieved_data))
    def test_import_img(self):
        """Verify import of images"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
                                        'austin_sky320x240.jpg')
        assert(os.path.exists(sample_data_file))
        expected_data = scipy.misc.imread(sample_data_file, flatten=True)
        dataio.import_img(sample_data_file, flatten=True)
        # import_img should create <filename>.hdf5 under the app's data folder
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(sample_data_file) + ".hdf5")
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(expected_data, read_data))
        # Clean up; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError:
            pass
    def test_get_utwin_tof_data(self):
        """Verify retrieval of UTWin Time Of Flight data through convenience function"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
        # Raw fixture counts are scaled by the TOF resolution factor
        tof_resolution = 0.01
        assert(os.path.exists(tof_data_file))
        expected_tof_data = np.load(tof_data_file) * tof_resolution
        returned_tof_data = dataio.get_utwin_tof_data(sample_data_file)[0]
        numpy.testing.assert_array_almost_equal(expected_tof_data, returned_tof_data, decimal=3)
    def test_import_utwin_tof(self):
        """Verify import of UTWin Time Of Flight data through convenience function"""
        tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        tof_resolution = 0.01
        expected_tof_data = np.load(tof_data_file) * tof_resolution
        root, ext = os.path.splitext(os.path.basename(sample_data_file))
        # import_utwin_tof should create <root>_tofdata0.csc.hdf5 under the app's data folder
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(root) + "_tofdata0.csc.hdf5")
        dataio.import_utwin_tof(sample_data_file)
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    numpy.testing.assert_array_almost_equal(expected_tof_data, read_data, decimal=3)
        # Clean up; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError:
            pass
    def test_get_utwin_amp_data(self):
        """Verify retrieval of UTWin amplitude data through convenience function"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
        assert(os.path.exists(amp_data_file))
        # NOTE(review): variable name says TOF but the fixture holds amplitude data
        expected_tof_data = np.load(amp_data_file)
        self.assertTrue(np.array_equal(expected_tof_data, dataio.get_utwin_amp_data(sample_data_file)[0]))
    def test_import_utwin_amp(self):
        """Verify import of UTWin amplitude data through convenience function"""
        amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        expected_amp_data = np.load(amp_data_file)
        root, ext = os.path.splitext(os.path.basename(sample_data_file))
        # import_utwin_amp should create <root>_ampdata0.csc.hdf5 under the app's data folder
        dest_file = os.path.join(pathfinder.data_path(),
                                 os.path.basename(root) + "_ampdata0.csc.hdf5")
        dataio.import_utwin_amp(sample_data_file)
        self.assertTrue(os.path.exists(dest_file))
        with h5py.File(dest_file, "r") as fidin:
            root, ext = os.path.splitext(os.path.basename(dest_file))
            for key in fidin.keys():
                if key.startswith(root):
                    read_data = fidin[key][...]
                    self.assertTrue(np.array_equal(expected_amp_data, read_data))
        # Clean up; tolerate files held open on Windows
        try:
            if os.path.exists(dest_file):
                os.remove(dest_file)
        except WindowsError:
            pass
    def test_get_utwin_data(self):
        """Verify returning UTWin data"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
        # Reading directly through UTWinCScanDataFile is the reference result
        sample_reader = dataio.UTWinCScanDataFile(sample_data_file)
        sample_reader.read_data()
        expected_data = sample_reader.data
        returned_data = dataio.get_utwin_data(sample_data_file)
        for datatype in expected_data:
            self.assertTrue(np.array_equal(expected_data[datatype], returned_data[datatype]))
    def test_get_winspect_data(self):
        """Verify retrieval of Winspect data through convenience function"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample_data.sdt')
        assert(os.path.exists(sample_data_file))
        # Reading through WinspectReader directly is the reference result
        scan_reader = dataio.WinspectReader(sample_data_file)
        expected_data_list = scan_reader.get_winspect_data()
        retrieved_data_list = dataio.get_winspect_data(sample_data_file)
        self.assertEqual(len(expected_data_list), len(retrieved_data_list))
        for data_array_idx in range(len(expected_data_list)):
            self.assertTrue(np.array_equal(expected_data_list[data_array_idx].data, retrieved_data_list[data_array_idx].data))
    def test_import_winspect(self):
        """Verify import of Winspect data through convenience function"""
        sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'sample_data.sdt')
        assert(os.path.exists(sample_data_file))
        # Imports land under the app's data folder as <root>_ampdata0 / _waveformdata0 HDF5 files
        output_basename, ext = os.path.splitext(sample_data_file)
        amp_dest_file = os.path.join(pathfinder.data_path(),
                                     os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
        waveform_dest_file = os.path.join(pathfinder.data_path(),
                                          os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
        dataio.import_winspect(sample_data_file)
        expected_data_list = dataio.get_winspect_data(sample_data_file)
        for dataset in expected_data_list:
            # NOTE(review): an unrecognized data_type would leave dest_file
            # unbound or stale from the previous iteration -- confirm data_type
            # is always amplitude or waveform
            if "amplitude" in dataset.data_type:
                dest_file = amp_dest_file
            elif "waveform" in dataset.data_type:
                dest_file = waveform_dest_file
            with h5py.File(dest_file, "r") as fidin:
                root, ext = os.path.splitext(os.path.basename(dest_file))
                for key in fidin.keys():
                    if key.startswith(root):
                        read_data = fidin[key][...]
                        self.assertTrue(np.array_equal(dataset.data, read_data))
            # Clean up; tolerate files held open on Windows
            try:
                if os.path.exists(dest_file):
                    os.remove(dest_file)
            except WindowsError:
                pass
    def tearDown(self):
        """Remove the sample data file and its HDF5 counterpart if present"""
        if os.path.exists(self.sample_data_file + ".hdf5"):
            os.remove(self.sample_data_file + ".hdf5")
        if os.path.exists(self.sample_data_file):
            os.remove(self.sample_data_file)
class TestUTWinCScanReader(unittest.TestCase):
def setUp(self):
self.sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
assert(os.path.exists(self.sample_data_file))
self.cscan_reader = dataio.UTWinCscanReader()
def test_basicfile_parameters(self):
self.assertEqual(self.cscan_reader.header_string_length, 15)
expected_message_ids = {'CSCAN_DATA': 2300,
'WAVEFORM_pre240': 2016,
'WAVEFORM_post240': 2303,
'UTSAVE_UTCD0': 2010,
'UTSAVE_UTCD1': 2011,
'UTSAVE_UTCD2': 2012,
'UTSAVE_UTCD4': 2014,
'UTSAVE_UTPro0': 253,
'PROJECT': 301,
'UTSAVE_UTHead': 100,
'UTSAVE_UTCScan0': 750,
'UTSAVE_UTCD10': 2020,
'UTSAVE_UTCScan3': 753}
self.assertDictEqual(expected_message_ids, self.cscan_reader.message_ids)
def test_is_cscanfile(self):
self.assertTrue(self.cscan_reader.is_cscanfile(self.sample_data_file))
def test_msg_info(self):
with open(self.sample_data_file, "rb") as fidin:
fidin.seek(self.cscan_reader.header_string_length)
first_message = (100, 14)
self.assertTupleEqual(first_message, self.cscan_reader.msg_info(fidin))
def test_find_message(self):
expected_file_positions = ((2014, 38037),
(2011, 38059),
(2010, 38003),
(2012, 422075),
(2010, 38003),
(2010, 38003))
for message_id, expected_pos in expected_file_positions:
self.assertEqual(self.cscan_reader.find_message(self.sample_data_file, message_id), expected_pos)
def test_find_blocks(self):
expected_filed_positions = [173, 920, 1667, 2414, 3161, 3908, 4655, 5402]
self.assertListEqual(expected_filed_positions, self.cscan_reader.find_blocks(self.sample_data_file, 950))
def test_read_field(self):
start_pos = self.cscan_reader.find_message(self.sample_data_file, 950)
self.assertTrue(start_pos != -1)
with open(self.sample_data_file, "rb") as fidin:
fidin.seek(start_pos)
expected_ad_delay = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
expected_ad_width = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
expected_ad_blanking_width = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
expected_ad_gain = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
expected_ad_offset = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
expected_ad_trigger_level = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
expected_ad_trigger_rate = np.fromfile(fidin, self.cscan_reader.field_sizes['float'], 1)[0]
with open(self.sample_data_file, "rb") as fidin:
fidin.seek(start_pos)
ad_delay = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
ad_width = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
ad_blanking_width = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
ad_gain = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
ad_offset = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
ad_trigger_level = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
ad_trigger_rate = self.cscan_reader.read_field(fidin, self.cscan_reader.field_sizes['float'])
self.assertAlmostEqual(expected_ad_delay, ad_delay)
self.assertAlmostEqual(expected_ad_width, ad_width)
self.assertAlmostEqual(expected_ad_blanking_width, ad_blanking_width)
self.assertAlmostEqual(expected_ad_gain, ad_gain)
self.assertAlmostEqual(expected_ad_offset, ad_offset)
self.assertAlmostEqual(expected_ad_trigger_level, ad_trigger_level)
self.assertAlmostEqual(expected_ad_trigger_rate, ad_trigger_rate)
class TestUTWinCScanDataFile(unittest.TestCase):
def setUp(self):
self.sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData.csc')
self.cscan_datafile = dataio.UTWinCScanDataFile(self.sample_data_file)
def test_get_scan_version(self):
self.assertEqual(self.cscan_datafile.get_scan_version(), 117)
def test_read_scan_properties(self):
important_scan_properties = {'n_height':320,
'n_width':600,
'rf_length':2994,
'channel_active':[1, 0, 0, 0, 0, 0, 0, 0]}
for idx in important_scan_properties.keys():
prop = important_scan_properties[idx]
if not isinstance(prop, list):
self.assertEqual(prop, self.cscan_datafile.scan_properties[idx])
else:
self.assertListEqual(prop, self.cscan_datafile.scan_properties[idx])
def test_read_tof_data(self):
tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
tof_resolution = 0.01
assert(os.path.exists(tof_data_file))
expected_tof_data = np.load(tof_data_file) * tof_resolution
self.cscan_datafile.read_tof_data()
numpy.testing.assert_array_almost_equal(expected_tof_data, self.cscan_datafile.data['tof'][0], decimal=3)
def test_read_amplitude_data(self):
amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
assert(os.path.exists(amp_data_file))
expected_amp_data = np.load(amp_data_file)
self.cscan_datafile.read_amplitude_data()
self.assertTrue(np.array_equal(expected_amp_data, self.cscan_datafile.data['amplitude'][0]))
def test_import_tof(self):
tof_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_tofdata.npy')
tof_resolution = 0.01
csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData')
assert(os.path.exists(tof_data_file))
expected_tof_data = np.load(tof_data_file) * tof_resolution
dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(csc_data_file) + "_tofdata0.csc.hdf5")
self.cscan_datafile.import_tof_data()
self.assertTrue(os.path.exists(dest_file))
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
numpy.testing.assert_array_almost_equal(expected_tof_data, read_data, decimal=3)
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError:
pass
def test_import_amp(self):
amp_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData_ampdata.npy')
csc_data_file = os.path.join(os.path.dirname(__file__), 'support_files', 'CScanData')
assert(os.path.exists(amp_data_file))
expected_amp_data = np.load(amp_data_file)
dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(csc_data_file) + "_ampdata0.csc.hdf5")
self.cscan_datafile.import_amplitude_data()
self.assertTrue(os.path.exists(dest_file))
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
self.assertTrue(np.array_equal(expected_amp_data, read_data))
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError:
pass
class TestWinspectReader(unittest.TestCase):
def setUp(self):
self.sample_data_file = os.path.join(os.path.dirname(__file__), 'support_files',
'sample_data.sdt')
assert(os.path.exists(self.sample_data_file))
self.scan_reader = dataio.WinspectReader(self.sample_data_file)
def test_find_numbers(self):
float_strings = {"0.000000 mm":0.0, "0.775995 Usec":0.775995}
int_strings = {"35 18 0 22 3 112 ":[35, 18, 0, 22, 3, 112],
"Number of Sample Points : 3500":3500}
bad_strings = {"Ramshackle":[], "":[]}
for string in float_strings:
self.assertAlmostEqual(float_strings[string], self.scan_reader.find_numbers(string))
def test_get_winspect_data(self):
data_reader = dataio.WinspectDataFile(self.sample_data_file)
data_reader.read_data()
expected_data_list = data_reader.datasets
retrieved_data_list = self.scan_reader.get_winspect_data()
self.assertEqual(len(expected_data_list), len(retrieved_data_list))
for data_array_idx in range(len(expected_data_list)):
self.assertTrue(np.array_equal(expected_data_list[data_array_idx].data, retrieved_data_list[data_array_idx].data))
def test_import_winspect(self):
output_basename, ext = os.path.splitext(self.sample_data_file)
amp_dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(output_basename) + "_ampdata0" + ext + ".hdf5")
waveform_dest_file = os.path.join(pathfinder.data_path(),
os.path.basename(output_basename) + "_waveformdata0" + ext + ".hdf5")
self.scan_reader.import_winspect()
data_reader = dataio.WinspectDataFile(self.sample_data_file)
data_reader.read_data()
expected_data_list = data_reader.datasets
for dataset in expected_data_list:
if "amplitude" in dataset.data_type:
dest_file = amp_dest_file
elif "waveform" in dataset.data_type:
dest_file = waveform_dest_file
with h5py.File(dest_file, "r") as fidin:
root, ext = os.path.splitext(os.path.basename(dest_file))
for key in fidin.keys():
if key.startswith(root):
read_data = fidin[key][...]
self.assertTrue(np.array_equal(dataset.data, read_data))
try:
if os.path.exists(dest_file):
os.remove(dest_file)
except WindowsError:
pass
if __name__ == "__main__":
random.seed()
unittest.main() | true | true |
f72fca48a62d8d293aa54c0897823b567e43a32a | 2,008 | py | Python | tests/test_RunningStats.py | gratuxri/play-chess-with-a-webcam | 9ef7ec306a2a612871fba83130ebee1f044ef0c1 | [
"Apache-2.0"
] | 17 | 2019-10-25T01:33:43.000Z | 2022-03-21T03:31:56.000Z | tests/test_RunningStats.py | gratuxri/play-chess-with-a-webcam | 9ef7ec306a2a612871fba83130ebee1f044ef0c1 | [
"Apache-2.0"
] | 34 | 2019-10-17T06:52:30.000Z | 2022-01-19T12:45:43.000Z | tests/test_RunningStats.py | gratuxri/play-chess-with-a-webcam | 9ef7ec306a2a612871fba83130ebee1f044ef0c1 | [
"Apache-2.0"
] | 4 | 2019-11-29T09:19:38.000Z | 2021-10-13T03:12:25.000Z | #!/usr/bin/python3
# part of https://github.com/WolfgangFahl/play-chess-with-a-webcam
from pcwawc.runningstats import RunningStats, ColorStats, MovingAverage
import pytest
from unittest import TestCase
class RunningStatsTest(TestCase):
def test_RunningStats(self):
rs = RunningStats()
rs.push(17.0);
rs.push(19.0);
rs.push(24.0);
mean = rs.mean();
variance = rs.variance();
stdev = rs.standard_deviation();
print ("mean=%f variance=%f stdev=%f" % (mean, variance, stdev))
assert mean == 20.0
assert variance == 13.0
assert stdev == pytest.approx(3.605551, 0.00001)
def test_ColorStats(self):
colors = [(100, 100, 100), (90, 100, 90), (80, 90, 80), (110, 110, 120)]
colorStats = ColorStats()
for color in colors:
r, g, b = color
colorStats.push(r, g, b)
cm = colorStats.mean();
mR, mG, mB = cm
vR, vG, vB = colorStats.variance();
sR, sG, sB = colorStats.standard_deviation();
print ("mean=%f,%f,%f variance=%f,%f,%f stdev=%f,%f,%f" % (mR, mG, mB, vR, vG, vB, sR, sG, sB))
assert cm == (95.0, 100.0, 97.5)
prec = 0.000001
assert vR == pytest.approx(166.666667, prec)
assert vG == pytest.approx(66.666667, prec)
assert vB == pytest.approx(291.666667, prec)
assert sR == pytest.approx(12.909944, prec)
assert sG == pytest.approx(8.164966, prec)
assert sB == pytest.approx(17.078251, prec)
def test_MovingAverage(self):
values=[10,12,17,19,24,17,13,12,8]
means=[10,11,13,16,20,20,18,14,11]
gs=[0,2,3.5,3.5,3.5,-1,-5.5,-2.5,-2.5]
ma=MovingAverage(3)
index=0
for value in values:
ma.push(value)
print ("%d: %f %f %f" % (index,value,ma.mean(),ma.gradient()))
assert means[index]==ma.mean()
assert gs[index]==ma.gradient()
index+=1 | 36.509091 | 103 | 0.556275 |
from pcwawc.runningstats import RunningStats, ColorStats, MovingAverage
import pytest
from unittest import TestCase
class RunningStatsTest(TestCase):
def test_RunningStats(self):
rs = RunningStats()
rs.push(17.0);
rs.push(19.0);
rs.push(24.0);
mean = rs.mean();
variance = rs.variance();
stdev = rs.standard_deviation();
print ("mean=%f variance=%f stdev=%f" % (mean, variance, stdev))
assert mean == 20.0
assert variance == 13.0
assert stdev == pytest.approx(3.605551, 0.00001)
def test_ColorStats(self):
colors = [(100, 100, 100), (90, 100, 90), (80, 90, 80), (110, 110, 120)]
colorStats = ColorStats()
for color in colors:
r, g, b = color
colorStats.push(r, g, b)
cm = colorStats.mean();
mR, mG, mB = cm
vR, vG, vB = colorStats.variance();
sR, sG, sB = colorStats.standard_deviation();
print ("mean=%f,%f,%f variance=%f,%f,%f stdev=%f,%f,%f" % (mR, mG, mB, vR, vG, vB, sR, sG, sB))
assert cm == (95.0, 100.0, 97.5)
prec = 0.000001
assert vR == pytest.approx(166.666667, prec)
assert vG == pytest.approx(66.666667, prec)
assert vB == pytest.approx(291.666667, prec)
assert sR == pytest.approx(12.909944, prec)
assert sG == pytest.approx(8.164966, prec)
assert sB == pytest.approx(17.078251, prec)
def test_MovingAverage(self):
values=[10,12,17,19,24,17,13,12,8]
means=[10,11,13,16,20,20,18,14,11]
gs=[0,2,3.5,3.5,3.5,-1,-5.5,-2.5,-2.5]
ma=MovingAverage(3)
index=0
for value in values:
ma.push(value)
print ("%d: %f %f %f" % (index,value,ma.mean(),ma.gradient()))
assert means[index]==ma.mean()
assert gs[index]==ma.gradient()
index+=1 | true | true |
f72fcbddac8b795d0d38b329417686484d875719 | 2,006 | py | Python | jabberbot/_tests/test_capat.py | RealTimeWeb/wikisite | 66a22c68c172f0ebb3c88a9885ccd33e2d59c3c5 | [
"Apache-2.0"
] | null | null | null | jabberbot/_tests/test_capat.py | RealTimeWeb/wikisite | 66a22c68c172f0ebb3c88a9885ccd33e2d59c3c5 | [
"Apache-2.0"
] | null | null | null | jabberbot/_tests/test_capat.py | RealTimeWeb/wikisite | 66a22c68c172f0ebb3c88a9885ccd33e2d59c3c5 | [
"Apache-2.0"
] | 1 | 2020-01-09T04:53:32.000Z | 2020-01-09T04:53:32.000Z | # -*- coding: utf-8 -*-
import py
try:
from jabberbot import capat
except ImportError:
py.test.skip("Skipping jabber bot tests - pyxmpp is not installed")
def test_ver_simple():
# example values supplied by the XEP
ident = (("client", "pc"), )
feat = ("http://jabber.org/protocol/disco#info",
"http://jabber.org/protocol/disco#items",
"http://jabber.org/protocol/muc",
)
assert capat.generate_ver(ident, feat) == "8RovUdtOmiAjzj+xI7SK5BCw3A8="
def test_ver_complex():
# this test should verify that ordering works properly
ident = (("client", "animal"),
("client", "bear"), # type ordering after category ordering
("apples", "bar"),
("apple", "foo"), # "apples" starts with "apple"
# thus it's greater
)
feat = ()
expected = capat.hash_new('sha1')
expected.update("apple/foo<apples/bar<client/animal<client/bear<")
expected = capat.base64.b64encode(expected.digest())
assert capat.generate_ver(ident, feat) == expected
def test_xml():
try:
import pyxmpp.iq
except ImportError:
py.test.skip("pyxmpp needs to be installed for this test")
x = pyxmpp.iq.Iq(stanza_type='result', stanza_id='disco1',
from_jid='romeo@montague.lit/orchard',
to_jid='juliet@capulet.lit/chamber')
y = x.new_query(ns_uri='http://jabber.org/protocol/disco#info')
z = y.newChild(None, 'identity', None)
z.setProp('category', 'client')
z.setProp('type', 'pc')
y.newChild(None, 'feature', None).setProp(
'var', 'http://jabber.org/protocol/disco#info')
y.newChild(None, 'feature', None).setProp(
'var', 'http://jabber.org/protocol/disco#items')
y.newChild(None, 'feature', None).setProp(
'var', 'http://jabber.org/protocol/muc')
assert capat.hash_iq(x) == "8RovUdtOmiAjzj+xI7SK5BCw3A8="
# hash value taken from `test_ver_simple`
| 34.586207 | 76 | 0.612164 |
import py
try:
from jabberbot import capat
except ImportError:
py.test.skip("Skipping jabber bot tests - pyxmpp is not installed")
def test_ver_simple():
ident = (("client", "pc"), )
feat = ("http://jabber.org/protocol/disco#info",
"http://jabber.org/protocol/disco#items",
"http://jabber.org/protocol/muc",
)
assert capat.generate_ver(ident, feat) == "8RovUdtOmiAjzj+xI7SK5BCw3A8="
def test_ver_complex():
ident = (("client", "animal"),
("client", "bear"),
("apples", "bar"),
("apple", "foo"),
)
feat = ()
expected = capat.hash_new('sha1')
expected.update("apple/foo<apples/bar<client/animal<client/bear<")
expected = capat.base64.b64encode(expected.digest())
assert capat.generate_ver(ident, feat) == expected
def test_xml():
try:
import pyxmpp.iq
except ImportError:
py.test.skip("pyxmpp needs to be installed for this test")
x = pyxmpp.iq.Iq(stanza_type='result', stanza_id='disco1',
from_jid='romeo@montague.lit/orchard',
to_jid='juliet@capulet.lit/chamber')
y = x.new_query(ns_uri='http://jabber.org/protocol/disco
z = y.newChild(None, 'identity', None)
z.setProp('category', 'client')
z.setProp('type', 'pc')
y.newChild(None, 'feature', None).setProp(
'var', 'http://jabber.org/protocol/disco
y.newChild(None, 'feature', None).setProp(
'var', 'http://jabber.org/protocol/disco
y.newChild(None, 'feature', None).setProp(
'var', 'http://jabber.org/protocol/muc')
assert capat.hash_iq(x) == "8RovUdtOmiAjzj+xI7SK5BCw3A8="
# hash value taken from `test_ver_simple`
| true | true |
f72fcdb0009d66ce95554b101bd82a076188d8f3 | 1,140 | py | Python | where_to_go/places/models.py | MZen2610/Yandex-poster | 07b1e44974783563c394b22625aa2543d74552f9 | [
"MIT"
] | null | null | null | where_to_go/places/models.py | MZen2610/Yandex-poster | 07b1e44974783563c394b22625aa2543d74552f9 | [
"MIT"
] | null | null | null | where_to_go/places/models.py | MZen2610/Yandex-poster | 07b1e44974783563c394b22625aa2543d74552f9 | [
"MIT"
] | null | null | null | from django.db import models
class Place(models.Model):
title = models.CharField(max_length=150, verbose_name='Наименование')
description_short = models.TextField(blank=True, verbose_name='Краткое описание')
description_long = models.TextField(blank=True, verbose_name='Полное описание')
lng = models.DecimalField(max_digits=17, decimal_places=14, verbose_name='Долгота')
lat = models.DecimalField(max_digits=17, decimal_places=14, verbose_name='Широта')
def __str__(self):
return self.title
class Meta:
verbose_name = 'Место'
verbose_name_plural = 'Места'
ordering = ['title']
class Images(models.Model):
title = models.ForeignKey('Place', on_delete=models.SET_NULL, null=True, verbose_name='Место', blank=False)
num = models.IntegerField(verbose_name='Позиция')
image = models.ImageField(upload_to='photos/%Y/%m/%d', blank=True, verbose_name='Изображение', null=True)
def __str__(self):
return f"{self.num} {self.title}"
class Meta:
verbose_name = 'Изображение'
verbose_name_plural = 'Изображения'
ordering = ['-num']
| 35.625 | 111 | 0.7 | from django.db import models
class Place(models.Model):
title = models.CharField(max_length=150, verbose_name='Наименование')
description_short = models.TextField(blank=True, verbose_name='Краткое описание')
description_long = models.TextField(blank=True, verbose_name='Полное описание')
lng = models.DecimalField(max_digits=17, decimal_places=14, verbose_name='Долгота')
lat = models.DecimalField(max_digits=17, decimal_places=14, verbose_name='Широта')
def __str__(self):
return self.title
class Meta:
verbose_name = 'Место'
verbose_name_plural = 'Места'
ordering = ['title']
class Images(models.Model):
title = models.ForeignKey('Place', on_delete=models.SET_NULL, null=True, verbose_name='Место', blank=False)
num = models.IntegerField(verbose_name='Позиция')
image = models.ImageField(upload_to='photos/%Y/%m/%d', blank=True, verbose_name='Изображение', null=True)
def __str__(self):
return f"{self.num} {self.title}"
class Meta:
verbose_name = 'Изображение'
verbose_name_plural = 'Изображения'
ordering = ['-num']
| true | true |
f72fcde236c44645e86f524db09b26ce3bfb931b | 3,649 | py | Python | server/core.py | cwza/deep_t2i | 22877fdd28ad407984ddc3bc4d57109c54c22fc0 | [
"Apache-2.0"
] | null | null | null | server/core.py | cwza/deep_t2i | 22877fdd28ad407984ddc3bc4d57109c54c22fc0 | [
"Apache-2.0"
] | null | null | null | server/core.py | cwza/deep_t2i | 22877fdd28ad407984ddc3bc4d57109c54c22fc0 | [
"Apache-2.0"
] | 1 | 2020-11-30T06:11:02.000Z | 2020-11-30T06:11:02.000Z | import os
from pathlib import Path
import numpy as np
from PIL import Image
import requests
from google.cloud import storage
import base64
from io import BytesIO
import uuid
__all__ = ['do', 'recaptcha_check']
def predict_and2jpg(model, cap):
''' cap: "white hair yellow eyes", returns: jpeg file buffer remember to close it or use with '''
img, _ = model.predict(cap)
img = Image.fromarray(np.uint8(img.numpy()))
buf = BytesIO()
img.save(buf, format='JPEG')
buf.seek(0)
return buf
# import matplotlib.pyplot as plt
# from deep_t2i.model_anime_heads import ExportedModel
# from deep_t2i.inference_anime_heads import predict
# model = ExportedModel.from_pretrained('./anime_heads.pt')
# with predict_and2jpg(model, "white hair yellow eyes") as buf:
# img = Image.open(buf)
# plt.imshow(img)
# plt.show()
gs_bucket_id = os.getenv('gs_bucket_id')
def upload_to_gs(client, img_file):
"upload img_file to google storage name it fname and return url"
bucket = client.bucket(gs_bucket_id)
fname = f'{uuid.uuid4().hex[:8]}.jpg'
blob = bucket.blob(fname)
blob.upload_from_file(img_file, content_type="image/jpeg")
return f'https://storage.googleapis.com/{gs_bucket_id}/{fname}'
# from deep_t2i.model_anime_heads import ExportedModel
# from deep_t2i.inference_anime_heads import predict
# gs_client = storage.Client()
# model = ExportedModel.from_pretrained('./anime_heads.pt')
# with predict_and2jpg(model, "white hair yellow eyes") as buf:
# url = upload_to_gs(gs_client, buf)
# print(url)
imgur_client_id = os.getenv('imgur_client_id')
def upload_to_imgur(img_file):
"upload img_file to imgur and return url"
img = img_file.read()
img = base64.standard_b64encode(img)
url = "https://api.imgur.com/3/image"
data = {'image': img, 'type': 'base64'}
headers = { 'Authorization': f'Client-ID {imgur_client_id}' }
res = requests.post(url, headers=headers, data=data).json()
if res['success']: return res["data"]["link"]
else:
raise Exception("Failed to upload to imgur")
# from deep_t2i.model_anime_heads import ExportedModel
# from deep_t2i.inference_anime_heads import predict
# model = ExportedModel.from_pretrained('./anime_heads.pt')
# with predict_and2jpg(model, "white hair yellow eyes") as buf:
# url = upload_to_imgur(buf)
# print(url)
def save_to_tmp(img_file):
" save img_file to ./tmp_jpg/ "
img = Image.open(img_file)
fname = f'{uuid.uuid4().hex[:8]}.jpg'
path = f'./temp_jpg/{fname}'
img.save(path)
return path
# from deep_t2i.model_anime_heads import ExportedModel
# from deep_t2i.inference_anime_heads import predict
# model = ExportedModel.from_pretrained('./anime_heads.pt')
# with predict_and2jpg(model, "white hair yellow eyes") as buf:
# url = save_to_tmp(buf)
# print(url)
img_server = os.getenv("img_server")
gs_client = storage.Client() if img_server=="gs" else None
def do(model, cap):
"generate image from model, upload image to img_server and return link"
with predict_and2jpg(model, cap) as buf:
if img_server=="gs":
url = upload_to_gs(gs_client, buf)
elif img_server=="imgur":
url = upload_to_imgur(buf)
else:
url = save_to_tmp(buf)
return url
# Recaptcha check
recaptcha_secret = os.getenv('recaptcha_secret')
def recaptcha_check(token):
if token is None: return False
url = "https://www.google.com/recaptcha/api/siteverify"
data = {
'secret': recaptcha_secret,
'response': token,
}
r = requests.post(url=url, data=data)
return r.json()['success']
| 34.424528 | 101 | 0.698822 | import os
from pathlib import Path
import numpy as np
from PIL import Image
import requests
from google.cloud import storage
import base64
from io import BytesIO
import uuid
__all__ = ['do', 'recaptcha_check']
def predict_and2jpg(model, cap):
img, _ = model.predict(cap)
img = Image.fromarray(np.uint8(img.numpy()))
buf = BytesIO()
img.save(buf, format='JPEG')
buf.seek(0)
return buf
gs_bucket_id = os.getenv('gs_bucket_id')
def upload_to_gs(client, img_file):
bucket = client.bucket(gs_bucket_id)
fname = f'{uuid.uuid4().hex[:8]}.jpg'
blob = bucket.blob(fname)
blob.upload_from_file(img_file, content_type="image/jpeg")
return f'https://storage.googleapis.com/{gs_bucket_id}/{fname}'
imgur_client_id = os.getenv('imgur_client_id')
def upload_to_imgur(img_file):
img = img_file.read()
img = base64.standard_b64encode(img)
url = "https://api.imgur.com/3/image"
data = {'image': img, 'type': 'base64'}
headers = { 'Authorization': f'Client-ID {imgur_client_id}' }
res = requests.post(url, headers=headers, data=data).json()
if res['success']: return res["data"]["link"]
else:
raise Exception("Failed to upload to imgur")
def save_to_tmp(img_file):
img = Image.open(img_file)
fname = f'{uuid.uuid4().hex[:8]}.jpg'
path = f'./temp_jpg/{fname}'
img.save(path)
return path
img_server = os.getenv("img_server")
gs_client = storage.Client() if img_server=="gs" else None
def do(model, cap):
with predict_and2jpg(model, cap) as buf:
if img_server=="gs":
url = upload_to_gs(gs_client, buf)
elif img_server=="imgur":
url = upload_to_imgur(buf)
else:
url = save_to_tmp(buf)
return url
recaptcha_secret = os.getenv('recaptcha_secret')
def recaptcha_check(token):
if token is None: return False
url = "https://www.google.com/recaptcha/api/siteverify"
data = {
'secret': recaptcha_secret,
'response': token,
}
r = requests.post(url=url, data=data)
return r.json()['success']
| true | true |
f72fce083693b057598af0c60439146c9ccc930a | 1,461 | py | Python | Task2D.py | dan7267/1a-flood-risk-project-93 | d95cee987f5673d637626e1804f719371a25daa8 | [
"MIT"
] | null | null | null | Task2D.py | dan7267/1a-flood-risk-project-93 | d95cee987f5673d637626e1804f719371a25daa8 | [
"MIT"
] | null | null | null | Task2D.py | dan7267/1a-flood-risk-project-93 | d95cee987f5673d637626e1804f719371a25daa8 | [
"MIT"
] | null | null | null | # Copyright (C) 2018 Garth N. Wells
#
# SPDX-License-Identifier: MIT
import datetime
from floodsystem.datafetcher import fetch_measure_levels
from floodsystem.stationdata import build_station_list
def run():
"""Requirements for Task2D"""
# Build list of stations
stations = build_station_list()
# Station name to find
station_name = "Cam"
# Find station
station_cam = None
for station in stations:
if station.name == station_name:
station_cam = station
break
# Check that station could be found. Return if not found.
if not station_cam:
print("Station {} could not be found".format(station_name))
return
# Alternative find station 'Cam' using the Python 'next' function
# (https://docs.python.org/3/library/functions.html#next). Raises
# an exception if station is not found.
# try:
# station_cam = next(s for s in stations if s.name == station_name)
# except StopIteration:
# print("Station {} could not be found".format(station_name))
# return
print(station_cam)
# Fetch data over past 2 days
dt = 2
dates, levels = fetch_measure_levels(
station_cam.measure_id, dt=datetime.timedelta(days=dt))
# Print level history
for date, level in zip(dates, levels):
print(date, level)
if __name__ == "__main__":
print("*** Task 2D: CUED Part IA Flood Warning System ***")
run()
| 27.055556 | 75 | 0.659138 |
import datetime
from floodsystem.datafetcher import fetch_measure_levels
from floodsystem.stationdata import build_station_list
def run():
stations = build_station_list()
station_name = "Cam"
station_cam = None
for station in stations:
if station.name == station_name:
station_cam = station
break
if not station_cam:
print("Station {} could not be found".format(station_name))
return
print(station_cam)
dt = 2
dates, levels = fetch_measure_levels(
station_cam.measure_id, dt=datetime.timedelta(days=dt))
for date, level in zip(dates, levels):
print(date, level)
if __name__ == "__main__":
print("*** Task 2D: CUED Part IA Flood Warning System ***")
run()
| true | true |
f72fce4b9ca244110fc20c8066f330fd436dbac7 | 769 | py | Python | contacts/server/server.py | alfredoroblesa/contacts-tool | 7b9d9ddbaa3ac1f2fc1210aa11958043a79d2e05 | [
"MIT"
] | null | null | null | contacts/server/server.py | alfredoroblesa/contacts-tool | 7b9d9ddbaa3ac1f2fc1210aa11958043a79d2e05 | [
"MIT"
] | null | null | null | contacts/server/server.py | alfredoroblesa/contacts-tool | 7b9d9ddbaa3ac1f2fc1210aa11958043a79d2e05 | [
"MIT"
] | null | null | null | import os
import json
from flask import Flask, render_template
DATABASE_PATH = "../.contacts-store"
# Read database and build HTML string
file_names = os.listdir(DATABASE_PATH)
file_names.remove(".git")
html = "<table><th>Contact</th><th>Last Name</th><th>Tlf</th><th>Email</th><th>Job</th><th>Province</th>"
for file_name in file_names:
file_path = os.path.join(DATABASE_PATH, file_name)
with open(file_path, 'r') as f:
data = json.load(f)
data['name'] = file_name
html += f"<tr><td>{data['name']}</td><td>{data['last_name']}</td><td>{data['tlf']}</td><td>{data['email']}</td><td>{data['job']}</td><td>{data['province']}</td></tr>"
# Create Flask app
server = Flask(__name__)
@server.route("/")
def contacts_table():
return html | 33.434783 | 174 | 0.654096 | import os
import json
from flask import Flask, render_template
DATABASE_PATH = "../.contacts-store"
file_names = os.listdir(DATABASE_PATH)
file_names.remove(".git")
html = "<table><th>Contact</th><th>Last Name</th><th>Tlf</th><th>Email</th><th>Job</th><th>Province</th>"
for file_name in file_names:
file_path = os.path.join(DATABASE_PATH, file_name)
with open(file_path, 'r') as f:
data = json.load(f)
data['name'] = file_name
html += f"<tr><td>{data['name']}</td><td>{data['last_name']}</td><td>{data['tlf']}</td><td>{data['email']}</td><td>{data['job']}</td><td>{data['province']}</td></tr>"
server = Flask(__name__)
@server.route("/")
def contacts_table():
return html | true | true |
f72fcf78d29e01d35333b5634ff87c068a1f35d6 | 13,176 | py | Python | Networks/4_layer_net_Parameter_optimization.py | Kohulan/Decimer-Python | 17373e02faedb28ba94742f61001bb3c6b015798 | [
"MIT"
] | 5 | 2019-07-24T14:18:07.000Z | 2021-11-08T00:35:55.000Z | Networks/4_layer_net_Parameter_optimization.py | Kohulan/Decimer-Python | 17373e02faedb28ba94742f61001bb3c6b015798 | [
"MIT"
] | null | null | null | Networks/4_layer_net_Parameter_optimization.py | Kohulan/Decimer-Python | 17373e02faedb28ba94742f61001bb3c6b015798 | [
"MIT"
] | 5 | 2020-09-16T13:01:31.000Z | 2022-01-24T06:26:06.000Z | '''
* This Software is under the MIT License
* Refer to LICENSE or https://opensource.org/licenses/MIT for more information
* Written by Kohulan Rajan
* © 2019
'''
#Parallelized datareading network
import tensorflow as tf
import os
import sys
import numpy as np
import matplotlib as mpl
import csv
mpl.use('Agg')
import matplotlib.pyplot as plt
from datetime import datetime
from numpy import array
import pickle
import lz4.frame as lz
import multiprocessing
# Print numpy arrays in full when logging results.
# Fix: np.nan is rejected as a threshold by modern NumPy (it must be numeric);
# sys.maxsize is the documented way to disable truncation ('sys' is already imported).
np.set_printoptions(threshold=sys.maxsize)
# Pin the job to one specific GPU on the cluster
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"
# Hidden-neuron counts to sweep for the first and second hidden layers
hidden_neurons_list_I = [2,4,8,16,32,64,128,512,1024,2048,4096]
hidden_neurons_list_II = [2,4,8,16,32,64,128,512,1024,2048,4096]
# Mini-batch sizes to sweep
batch_sizer_list = [500,1000]
# Adam learning rates to sweep
learning_rate_list = [0.001,0.003,0.005,0.007,0.009,0.01]
#Paramter Optimizing loops
for hidden_neurons_I in range(len(hidden_neurons_list_I)):
for hidden_neurons_II in range(len(hidden_neurons_list_II)):
for batch_sizer in range(len(batch_sizer_list)):
for learning_rate_ in range(len(learning_rate_list)):
f = open("/Results/Batch Size_{}_learning_rate_{}_hidden_neurons_{}_x_{}.txt".format(batch_sizer_list[batch_sizer],learning_rate_list[learning_rate_],hidden_neurons_list_I[hidden_neurons_I],hidden_neurons_list_II[hidden_neurons_II]), 'w',0)
sys.stdout = f
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Network Started")
#Data input from image data
#labels
def label_data(is_test=False):
data_path = "train"
if is_test:
data_path = "test"
myFile = open('/Data/Potential'+data_path+'_labels.csv',"r")
labels = []
for row in myFile:
x = int(row.strip().split(",")[1])
labels.append(x)
myFile.close()
return np.asarray(labels)
y_train = label_data()
y_test = label_data(is_test=True)
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Labels loaded !!")
#Image array data
Train_Images = pickle.load( open("/Data/train_compressed.txt","rb"))
Test_Images = pickle.load( open("/Data/test_compressed.txt","rb"))
train_items = Train_Images.items()
test_items = Test_Images.items()
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Loading done! Train",len(train_items))
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Loading done! Test",len(test_items))
#one hot vector transformation
def one_hot(y, n_labels):
mat = np.zeros((len(y), n_labels))
for i, val in enumerate(y):
mat[i, val] = 1
return mat
# Parameters
learning_rate = learning_rate_list[learning_rate_]
training_epochs = 20
batch_size = batch_sizer_list[batch_sizer]
display_step = 1
testbatch_size = 1000
totaltrain_batch = len(train_items)/batch_size
totaltest_batch = len(test_items)/testbatch_size
# Network Parameters
n_hidden_1 = hidden_neurons_list_I[hidden_neurons_I] # 1st layer number of neurons
n_hidden_2 = hidden_neurons_list_II[hidden_neurons_II] # 1st layer number of neurons
n_input = 256*256 # Data input (Image shape: 1024 * 1024)
n_classes = 36 # Bond_Count
# tf Graph input
X = tf.placeholder("float", [None, n_input])
Y = tf.placeholder("float", [None, n_classes])
# Store layers weight & bias
weights = {
'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
'b1': tf.Variable(tf.random_normal([n_hidden_1])),
'b2': tf.Variable(tf.random_normal([n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_classes]))
}
# Create model
def multilayer_perceptron(x):
    """Two ReLU hidden layers followed by a linear class-score layer.

    Uses the module-level `weights` / `biases` variable dicts.
    """
    hidden_one = tf.nn.relu(tf.add(tf.matmul(x, weights['h1']), biases['b1']))
    hidden_two = tf.nn.relu(tf.add(tf.matmul(hidden_one, weights['h2']), biases['b2']))
    # No activation on the output: raw logits are consumed by the
    # softmax-cross-entropy loss downstream.
    return tf.matmul(hidden_two, weights['out']) + biases['out']
# Construct model
logits = multilayer_perceptron(X)
# Define loss and optimizer
# Cross entropy is computed from the raw logits (softmax applied inside).
loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss_op)
# Initializing the variables
init = tf.global_variables_initializer()
# encoding labels to one_hot vectors
y_data_enc = one_hot(y_train, n_classes)
y_test_enc = one_hot(y_test, n_classes)
# Evaluate model (with test logits, for dropout to be disabled)
correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Evaluate the errors, mean,median and maximum errors
pred = tf.argmax(logits, 1)
# Signed difference between the true and predicted class index.
pred_difference = tf.subtract(tf.argmax(Y, 1),tf.argmax(logits, 1))
mean_error=[]
median_error=[]
maximum_error=[]
#Initiating data for plots
loss_history = []
acc_history = []
valid_history = []
acc_valid_history = []
difference_history = []
test_loss_history = []
test_accuracy_history = []
print ("Data decompression for test batch started!")
#-----------------------------------------------------------------------------------------------------------------
print ("Total available threads for multiprocessing: ",multiprocessing.cpu_count())
#Decompressing Lines Test
def decomp_test(k):
    """Decompress the k-th test image into a normalized float32 vector.

    Wraps dict.values() in list() so positional indexing also works on
    Python 3, where values() returns a non-subscriptable view.
    """
    strarraytest = lz.decompress(list(Test_Images.values())[k])
    # Parse the comma-separated pixel string, then invert/normalize so
    # pixel value 255 maps to 0.0 and 0 maps to 1.0 (assumes 8-bit values).
    floatarray_test = np.fromstring(strarraytest, dtype=float, sep=',')
    floatarray32_test = np.array(floatarray_test).astype(np.float32)
    return 1.0 - floatarray32_test / 255.0
# Worker pool used to decompress test images in parallel (all CPUs).
pool_test = multiprocessing.Pool()
def decomp_train(j):
    """Decompress the j-th training image into a normalized float32 vector.

    Wraps dict.values() in list() so positional indexing also works on
    Python 3, where values() returns a non-subscriptable view.
    """
    strarray = lz.decompress(list(Train_Images.values())[j])
    # Parse the comma-separated pixel string, then invert/normalize so
    # pixel value 255 maps to 0.0 and 0 maps to 1.0 (assumes 8-bit values).
    floatarray = np.fromstring(strarray, dtype=float, sep=',')
    floatarray32 = np.array(floatarray).astype(np.float32)
    return 1.0 - floatarray32 / 255.0
# Worker pool used to decompress training images in parallel.
pool_train = multiprocessing.Pool()
#Network training
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Training Started")
# Session options: fall back to CPU for unplaceable ops and grow GPU
# memory on demand instead of pre-allocating it all.
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth = True
config.gpu_options.allocator_type = 'BFC'
# Run the graph: each epoch trains over all batches, then evaluates on the
# full test set.  Image batches are decompressed on the worker pools.
with tf.Session(config=config) as sess:
    sess.run(init)
    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0
        print ("total batch",totaltrain_batch)
        counter=0
        total_correct_preds = 0
        Train_loss_per_batch = 0
        # Loop over all batches
        for l in range(totaltrain_batch):
            # NOTE(review): "bathc" below is a typo in the log text only.
            print ("bathc",l)
            print ("tests","count",counter,"batchsize",counter+batch_size)
            # Decompress this batch of training images in parallel.
            train_batchX = pool_train.map(decomp_train,range(counter,counter+batch_size))
            batch_x=train_batchX
            batch_y=y_data_enc[counter:(counter+len(train_batchX))]
            # One optimization step; c is the batch's cross-entropy loss.
            _, c = sess.run([train_op, loss_op], feed_dict={X: batch_x,Y: batch_y})
            Train_loss_per_batch += c
            #Validation and calculating training accuracy
            _, accu_train = sess.run([loss_op, accuracy], feed_dict={X: batch_x,Y: batch_y})
            valid_history.append(accu_train)
            total_correct_preds += accu_train
            print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"train Accuracy:",accu_train)
            print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),counter,"batch over")
            counter += len(train_batchX)
        # Mean per-batch training accuracy for this epoch.
        validation_accuracy = total_correct_preds/totaltrain_batch
        print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Train accuracy:",validation_accuracy)
        acc_valid_history.append(validation_accuracy)
        loss_history.append(Train_loss_per_batch/totaltrain_batch)
        #Testing
        counter_test = 0
        All_test_loss = 0
        All_error = 0
        test_accuracy_perbatch = 0
        for test_set in range(totaltest_batch):
            X_test = pool_test.map(decomp_test,range(counter_test,counter_test+testbatch_size))
            Y_test = y_test_enc[counter_test:(counter_test+len(X_test))]
            test_acc = accuracy.eval({X: X_test, Y: Y_test})
            print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Accuracy:", test_acc)
            test_accuracy_perbatch += test_acc
            # error holds per-example (true - predicted) label differences.
            test_loss_batch,predict,error = sess.run([loss_op,pred,pred_difference], feed_dict={X: X_test, Y: Y_test})
            All_test_loss += test_loss_batch
            All_error += error
            #print(predict)
            counter_test += len(X_test)
        #Statistics
        print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Final Test Accuracy:",test_accuracy_perbatch/totaltest_batch)
        mean_error.append(np.absolute(np.mean(All_error/totaltest_batch)))
        median_error.append(np.absolute(np.median(All_error/totaltest_batch)))
        maximum_error.append(np.absolute(np.amax(All_error/totaltest_batch)))
        test_loss_history.append(All_test_loss/totaltest_batch)
        test_accuracy_history.append(test_accuracy_perbatch/totaltest_batch)
        # Display logs per epoch step
        if epoch % display_step == 0:
            print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Epoch:", '%04d' % (epoch+1))
        print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Optimization Finished!")
    print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Network completed")
    f.close()
    pool_train.close()
    # Final results for various bond counts
    # Append this hyper-parameter combination's summary to the shared report.
    # NOTE(review): sys.stdout is redirected here and never restored.
    file_append = open('/Results/Final_Report.txt' , 'a+')
    sys.stdout = file_append
    print("\n---------------------------------------------------------------------------------------------------------------------------------------------------------------------\n")
    print("Batch Size_{}_learning_rate_{}_hidden_neurons_{}_x_{}.txt".format(batch_sizer_list[batch_sizer],learning_rate_list[learning_rate_],hidden_neurons_list_I[hidden_neurons_I],hidden_neurons_list_II[hidden_neurons_II]))
    print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Final Train accuracy:",validation_accuracy)
    print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Final Test Accuracy:",test_accuracy_perbatch/totaltest_batch)
    # Re-run the test set once more to collect per-example prediction errors.
    counter_test_x = 0
    prediction_difference = 0
    for testing in range(totaltest_batch):
        X_test = pool_test.map(decomp_test,range(counter_test_x,counter_test_x+testbatch_size))
        Y_test = y_test_enc[counter_test_x:(counter_test_x+len(X_test))]
        _, predict,prediction_difference_batch = sess.run([loss_op,pred,pred_difference], feed_dict={X: X_test, Y: Y_test})
        prediction_difference += prediction_difference_batch
        counter_test_x += len(X_test)
    prediction_window = np.absolute(prediction_difference)
    pool_test.close()
    # Accuracy when a prediction within j bonds of the truth counts as
    # correct.  NOTE(review): "Currectly" below is a typo in log text only.
    for j in range(10):
        count_error = 0
        for i in prediction_window:
            if i<=j:
                count_error+=1
        Window_accuracy = float(count_error)/len(test_items)*100
        print("Currectly predicted bond count with error less than",j,"bonds, Accuracy ={:.2f}".format(Window_accuracy))
    file_append.close()
    #Matplot plot depiction
    # Three stacked panels: loss curves, accuracy curves, error statistics.
    plt.subplot(3,1,1)
    plt.plot(loss_history, '-o', label='Train Loss value')
    plt.title('Training & Tesing Loss')
    plt.xlabel('Epoch x Batches')
    plt.ylabel('Loss Value')
    plt.plot(test_loss_history, '-o', label='Test Loss value')
    plt.xlabel('Epoch x Batches')
    plt.ylabel('Loss Value')
    plt.legend(ncol=2, loc='upper right')
    plt.subplot(3,1,2)
    plt.gca().set_ylim([0,1.0])
    plt.plot(acc_valid_history, '-o', label='Train Accuracy value')
    plt.plot(test_accuracy_history, '-o', label='Test Accuracy value')
    #plt.plot(difference_history, '-o', label='Train-Test Accuracy')
    plt.title('Train & Test Accuracy')
    plt.xlabel('Batches')
    plt.ylabel('Accuracy')
    plt.legend(ncol=2, loc='lower right')
    plt.subplot(3,1,3)
    plt.plot(mean_error, '-o', label='Mean of error')
    plt.plot(median_error, '-o', label='Median of error')
    plt.plot(maximum_error, '-o', label='Maximum error')
    plt.xlabel('Batches')
    plt.ylabel('Error')
    plt.legend(ncol=2, loc='lower right')
    plt.gcf().set_size_inches(15, 30)
    plt.savefig("/Results/Batch Size_{}_learning_rate_{}_hidden_neurons_{}_x_{}.png".format(batch_sizer_list[batch_sizer],learning_rate_list[learning_rate_],hidden_neurons_list_I[hidden_neurons_I],hidden_neurons_list_II[hidden_neurons_II]))
plt.close() | 41.046729 | 245 | 0.652095 |
import tensorflow as tf
import os
import sys
import numpy as np
import matplotlib as mpl
import csv
mpl.use('Agg')
import matplotlib.pyplot as plt
from datetime import datetime
from numpy import array
import pickle
import lz4.frame as lz
import multiprocessing
np.set_printoptions(threshold=np.nan)
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"
hidden_neurons_list_I = [2,4,8,16,32,64,128,512,1024,2048,4096]
hidden_neurons_list_II = [2,4,8,16,32,64,128,512,1024,2048,4096]
batch_sizer_list = [500,1000]
learning_rate_list = [0.001,0.003,0.005,0.007,0.009,0.01]
for hidden_neurons_I in range(len(hidden_neurons_list_I)):
for hidden_neurons_II in range(len(hidden_neurons_list_II)):
for batch_sizer in range(len(batch_sizer_list)):
for learning_rate_ in range(len(learning_rate_list)):
f = open("/Results/Batch Size_{}_learning_rate_{}_hidden_neurons_{}_x_{}.txt".format(batch_sizer_list[batch_sizer],learning_rate_list[learning_rate_],hidden_neurons_list_I[hidden_neurons_I],hidden_neurons_list_II[hidden_neurons_II]), 'w',0)
sys.stdout = f
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Network Started")
def label_data(is_test=False):
data_path = "train"
if is_test:
data_path = "test"
myFile = open('/Data/Potential'+data_path+'_labels.csv',"r")
labels = []
for row in myFile:
x = int(row.strip().split(",")[1])
labels.append(x)
myFile.close()
return np.asarray(labels)
y_train = label_data()
y_test = label_data(is_test=True)
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Labels loaded !!")
Train_Images = pickle.load( open("/Data/train_compressed.txt","rb"))
Test_Images = pickle.load( open("/Data/test_compressed.txt","rb"))
train_items = Train_Images.items()
test_items = Test_Images.items()
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Loading done! Train",len(train_items))
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Loading done! Test",len(test_items))
def one_hot(y, n_labels):
mat = np.zeros((len(y), n_labels))
for i, val in enumerate(y):
mat[i, val] = 1
return mat
learning_rate = learning_rate_list[learning_rate_]
training_epochs = 20
batch_size = batch_sizer_list[batch_sizer]
display_step = 1
testbatch_size = 1000
totaltrain_batch = len(train_items)/batch_size
totaltest_batch = len(test_items)/testbatch_size
n_hidden_1 = hidden_neurons_list_I[hidden_neurons_I]
n_hidden_2 = hidden_neurons_list_II[hidden_neurons_II]
n_input = 256*256
n_classes = 36
X = tf.placeholder("float", [None, n_input])
Y = tf.placeholder("float", [None, n_classes])
weights = {
'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
'b1': tf.Variable(tf.random_normal([n_hidden_1])),
'b2': tf.Variable(tf.random_normal([n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_classes]))
}
def multilayer_perceptron(x):
layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
layer_1 = tf.nn.relu(layer_1)
layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
layer_2 = tf.nn.relu(layer_2)
out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
return out_layer
logits = multilayer_perceptron(X)
loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss_op)
init = tf.global_variables_initializer()
y_data_enc = one_hot(y_train, n_classes)
y_test_enc = one_hot(y_test, n_classes)
correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
pred = tf.argmax(logits, 1)
pred_difference = tf.subtract(tf.argmax(Y, 1),tf.argmax(logits, 1))
mean_error=[]
median_error=[]
maximum_error=[]
loss_history = []
acc_history = []
valid_history = []
acc_valid_history = []
difference_history = []
test_loss_history = []
test_accuracy_history = []
print ("Data decompression for test batch started!")
print ("Total available threads for multiprocessing: ",multiprocessing.cpu_count())
def decomp_test(k):
strarraytest = (lz.decompress(Test_Images.values()[k]))
floatarray_test = np.fromstring(strarraytest, dtype=float, sep=',')
floatarray32_test = np.array(floatarray_test).astype(np.float32)
encoded_array_test=(1.0-floatarray32_test/255.0)
return encoded_array_test
pool_test = multiprocessing.Pool()
def decomp_train(j):
strarray = (lz.decompress(Train_Images.values()[j]))
floatarray = np.fromstring(strarray, dtype=float, sep=',')
floatarray32 = np.array(floatarray).astype(np.float32)
encoded_array=(1.0-floatarray32/255.0)
return encoded_array
pool_train = multiprocessing.Pool()
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Training Started")
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth = True
config.gpu_options.allocator_type = 'BFC'
with tf.Session(config=config) as sess:
sess.run(init)
for epoch in range(training_epochs):
avg_cost = 0
print ("total batch",totaltrain_batch)
counter=0
total_correct_preds = 0
Train_loss_per_batch = 0
for l in range(totaltrain_batch):
print ("bathc",l)
print ("tests","count",counter,"batchsize",counter+batch_size)
train_batchX = pool_train.map(decomp_train,range(counter,counter+batch_size))
batch_x=train_batchX
batch_y=y_data_enc[counter:(counter+len(train_batchX))]
_, c = sess.run([train_op, loss_op], feed_dict={X: batch_x,Y: batch_y})
Train_loss_per_batch += c
_, accu_train = sess.run([loss_op, accuracy], feed_dict={X: batch_x,Y: batch_y})
valid_history.append(accu_train)
total_correct_preds += accu_train
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"train Accuracy:",accu_train)
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),counter,"batch over")
counter += len(train_batchX)
validation_accuracy = total_correct_preds/totaltrain_batch
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Train accuracy:",validation_accuracy)
acc_valid_history.append(validation_accuracy)
loss_history.append(Train_loss_per_batch/totaltrain_batch)
counter_test = 0
All_test_loss = 0
All_error = 0
test_accuracy_perbatch = 0
for test_set in range(totaltest_batch):
X_test = pool_test.map(decomp_test,range(counter_test,counter_test+testbatch_size))
Y_test = y_test_enc[counter_test:(counter_test+len(X_test))]
test_acc = accuracy.eval({X: X_test, Y: Y_test})
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Accuracy:", test_acc)
test_accuracy_perbatch += test_acc
test_loss_batch,predict,error = sess.run([loss_op,pred,pred_difference], feed_dict={X: X_test, Y: Y_test})
All_test_loss += test_loss_batch
All_error += error
counter_test += len(X_test)
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Final Test Accuracy:",test_accuracy_perbatch/totaltest_batch)
mean_error.append(np.absolute(np.mean(All_error/totaltest_batch)))
median_error.append(np.absolute(np.median(All_error/totaltest_batch)))
maximum_error.append(np.absolute(np.amax(All_error/totaltest_batch)))
test_loss_history.append(All_test_loss/totaltest_batch)
test_accuracy_history.append(test_accuracy_perbatch/totaltest_batch)
if epoch % display_step == 0:
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Epoch:", '%04d' % (epoch+1))
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Optimization Finished!")
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Network completed")
f.close()
pool_train.close()
file_append = open('/Results/Final_Report.txt' , 'a+')
sys.stdout = file_append
print("\n---------------------------------------------------------------------------------------------------------------------------------------------------------------------\n")
print("Batch Size_{}_learning_rate_{}_hidden_neurons_{}_x_{}.txt".format(batch_sizer_list[batch_sizer],learning_rate_list[learning_rate_],hidden_neurons_list_I[hidden_neurons_I],hidden_neurons_list_II[hidden_neurons_II]))
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Final Train accuracy:",validation_accuracy)
print (datetime.now().strftime('%Y/%m/%d %H:%M:%S'),"Final Test Accuracy:",test_accuracy_perbatch/totaltest_batch)
counter_test_x = 0
prediction_difference = 0
for testing in range(totaltest_batch):
X_test = pool_test.map(decomp_test,range(counter_test_x,counter_test_x+testbatch_size))
Y_test = y_test_enc[counter_test_x:(counter_test_x+len(X_test))]
_, predict,prediction_difference_batch = sess.run([loss_op,pred,pred_difference], feed_dict={X: X_test, Y: Y_test})
prediction_difference += prediction_difference_batch
counter_test_x += len(X_test)
prediction_window = np.absolute(prediction_difference)
pool_test.close()
for j in range(10):
count_error = 0
for i in prediction_window:
if i<=j:
count_error+=1
Window_accuracy = float(count_error)/len(test_items)*100
print("Currectly predicted bond count with error less than",j,"bonds, Accuracy ={:.2f}".format(Window_accuracy))
file_append.close()
plt.subplot(3,1,1)
plt.plot(loss_history, '-o', label='Train Loss value')
plt.title('Training & Tesing Loss')
plt.xlabel('Epoch x Batches')
plt.ylabel('Loss Value')
plt.plot(test_loss_history, '-o', label='Test Loss value')
plt.xlabel('Epoch x Batches')
plt.ylabel('Loss Value')
plt.legend(ncol=2, loc='upper right')
plt.subplot(3,1,2)
plt.gca().set_ylim([0,1.0])
plt.plot(acc_valid_history, '-o', label='Train Accuracy value')
plt.plot(test_accuracy_history, '-o', label='Test Accuracy value')
plt.title('Train & Test Accuracy')
plt.xlabel('Batches')
plt.ylabel('Accuracy')
plt.legend(ncol=2, loc='lower right')
plt.subplot(3,1,3)
plt.plot(mean_error, '-o', label='Mean of error')
plt.plot(median_error, '-o', label='Median of error')
plt.plot(maximum_error, '-o', label='Maximum error')
plt.xlabel('Batches')
plt.ylabel('Error')
plt.legend(ncol=2, loc='lower right')
plt.gcf().set_size_inches(15, 30)
plt.savefig("/Results/Batch Size_{}_learning_rate_{}_hidden_neurons_{}_x_{}.png".format(batch_sizer_list[batch_sizer],learning_rate_list[learning_rate_],hidden_neurons_list_I[hidden_neurons_I],hidden_neurons_list_II[hidden_neurons_II]))
plt.close() | true | true |
f72fcf8390be1f9d3facd1c0666a534992e527a7 | 10,916 | py | Python | tensorflow_quantum/core/ops/batch_util_test.py | PyJedi/quantum | 3f4a3c320e048b8a8faf3a10339975d2d5366fb6 | [
"Apache-2.0"
] | 1 | 2020-06-01T01:28:36.000Z | 2020-06-01T01:28:36.000Z | tensorflow_quantum/core/ops/batch_util_test.py | PyJedi/quantum | 3f4a3c320e048b8a8faf3a10339975d2d5366fb6 | [
"Apache-2.0"
] | null | null | null | tensorflow_quantum/core/ops/batch_util_test.py | PyJedi/quantum | 3f4a3c320e048b8a8faf3a10339975d2d5366fb6 | [
"Apache-2.0"
] | 1 | 2020-06-07T01:28:01.000Z | 2020-06-07T01:28:01.000Z | # Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test parallel Cirq simulations."""
import numpy as np
import tensorflow as tf
from absl.testing import parameterized
from scipy import stats
import cirq
from tensorflow_quantum.core.ops import batch_util
from tensorflow_quantum.python import util
# Test fixture sizes: circuits per batch, qubits per circuit, Pauli terms
# per random PauliSum, and symbol names used in parameterized circuits.
BATCH_SIZE = 12
N_QUBITS = 5
PAULI_LENGTH = 3
SYMBOLS = ['alpha', 'beta', 'gamma']
def _get_mixed_batch(qubits, symbols, size):
    """Build a batch that is half fixed circuits, half symbolized circuits."""
    plain_circuits, plain_resolvers = util.random_circuit_resolver_batch(
        qubits, size // 2)
    symbol_circuits, symbol_resolvers = util.random_symbol_circuit_resolver_batch(
        qubits, symbols, size // 2)
    return plain_circuits + symbol_circuits, plain_resolvers + symbol_resolvers
def _pad_state(sim, state, n):
    """Extract the final state from a simulation result and right-pad it.

    The state is padded with the sentinel value -2 out to 2**n entries so
    that results over different qubit counts can be compared elementwise.
    """
    if isinstance(sim, cirq.sim.sparse_simulator.Simulator):
        state = state.final_state
    if isinstance(sim, cirq.DensityMatrixSimulator):
        state = state.final_density_matrix
    pad_width = (1 << n) - state.shape[-1]
    return np.pad(state, (0, pad_width), 'constant', constant_values=-2)
def _expectation_helper(sim, circuit, params, op):
    """Reference expectation value of `op` for `circuit` under `params`.

    Dispatches on the simulator type: exact wavefunction expectation for
    the sparse simulator, density-matrix expectation otherwise.  Returns a
    one-element list, or NotImplemented for unknown simulator types.
    """
    if isinstance(sim, cirq.sim.sparse_simulator.Simulator):
        state = sim.simulate(circuit, params).final_state.astype(np.complex128)
        # Qubits are mapped to state indices in sorted order.
        return [
            op.expectation_from_wavefunction(
                state,
                dict(
                    zip(sorted(circuit.all_qubits()),
                        (j for j in range(len(circuit.all_qubits())))))).real
        ]
    if isinstance(sim, cirq.DensityMatrixSimulator):
        state = sim.simulate(circuit, params).final_density_matrix
        # Sum the per-term expectations over the PauliSum's terms.
        return [
            sum(
                x._expectation_from_density_matrix_no_validation(
                    state,
                    dict(
                        zip(sorted(circuit.all_qubits()), (
                            j
                            for j in range(len(circuit.all_qubits()))))))
                for x in op)
        ]
    return NotImplemented
def _sample_helper(sim, state, n_qubits, n_samples):
    """Draw bitstring samples from a simulation result, matching sim type."""
    qubit_indices = list(range(n_qubits))
    if isinstance(sim, cirq.sim.sparse_simulator.Simulator):
        return cirq.sample_state_vector(
            state.final_state, qubit_indices, repetitions=n_samples)
    if isinstance(sim, cirq.DensityMatrixSimulator):
        return cirq.sample_density_matrix(
            state.final_density_matrix, qubit_indices, repetitions=n_samples)
    return NotImplemented
class BatchUtilTest(tf.test.TestCase, parameterized.TestCase):
    """Test cases for BatchUtils main functions."""
    # Every test below is parameterized to run against both the
    # density-matrix and the sparse (state-vector) cirq simulators.
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_batch_simulate_state(self, sim):
        """Test variable sized wavefunction output."""
        circuit_batch, resolver_batch = _get_mixed_batch(
            cirq.GridQubit.rect(1, N_QUBITS), SYMBOLS, BATCH_SIZE)
        results = batch_util.batch_calculate_state(circuit_batch,
                                                   resolver_batch, sim)
        for circuit, resolver, result in zip(circuit_batch, resolver_batch,
                                             results):
            r = _pad_state(sim, sim.simulate(circuit, resolver), N_QUBITS)
            self.assertAllClose(r, result, rtol=1e-5, atol=1e-5)
        self.assertDTypeEqual(results, np.complex64)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_batch_expectation(self, sim):
        """Test expectation."""
        qubits = cirq.GridQubit.rect(1, N_QUBITS)
        circuit_batch, resolver_batch = _get_mixed_batch(
            qubits + [cirq.GridQubit(9, 9)], SYMBOLS, BATCH_SIZE)
        ops = util.random_pauli_sums(qubits, PAULI_LENGTH, BATCH_SIZE)
        results = batch_util.batch_calculate_expectation(
            circuit_batch, resolver_batch, [[x] for x in ops], sim)
        for circuit, resolver, result, op in zip(circuit_batch, resolver_batch,
                                                 results, ops):
            r = _expectation_helper(sim, circuit, resolver, op)
            self.assertAllClose(r, result, rtol=1e-5, atol=1e-5)
        self.assertDTypeEqual(results, np.float32)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_batch_sampled_expectation(self, sim):
        """Test expectation."""
        qubits = cirq.GridQubit.rect(1, N_QUBITS)
        circuit_batch, resolver_batch = _get_mixed_batch(
            qubits + [cirq.GridQubit(9, 9)], SYMBOLS, BATCH_SIZE)
        ops = util.random_pauli_sums(qubits, PAULI_LENGTH, BATCH_SIZE)
        n_samples = [[1000] for _ in range(len(ops))]
        results = batch_util.batch_calculate_sampled_expectation(
            circuit_batch, resolver_batch, [[x] for x in ops], n_samples, sim)
        for circuit, resolver, result, op in zip(circuit_batch, resolver_batch,
                                                 results, ops):
            r = _expectation_helper(sim, circuit, resolver, op)
            # Loose tolerances: sampled estimates carry shot noise.
            self.assertAllClose(r, result, rtol=1.0, atol=1e-1)
        self.assertDTypeEqual(results, np.float32)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_batch_sample_basic(self, sim):
        """Test sampling."""
        n_samples = 1
        n_qubits = 8
        qubits = cirq.GridQubit.rect(1, n_qubits)
        # Deterministic circuit: Z on the first half, X on the second half.
        circuit = cirq.Circuit(*cirq.Z.on_each(*qubits[:n_qubits // 2]),
                               *cirq.X.on_each(*qubits[n_qubits // 2:]))
        test_results = batch_util.batch_sample([circuit],
                                               [cirq.ParamResolver({})],
                                               n_samples, sim)
        state = sim.simulate(circuit, cirq.ParamResolver({}))
        expected_results = _sample_helper(sim, state, len(qubits), n_samples)
        self.assertAllEqual(expected_results, test_results[0])
        self.assertDTypeEqual(test_results, np.int32)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_batch_sample(self, sim):
        """Test sampling."""
        # 2000 shots per basis state on average, so histograms converge.
        n_samples = 2000 * (2**N_QUBITS)
        circuit_batch, resolver_batch = _get_mixed_batch(
            cirq.GridQubit.rect(1, N_QUBITS), SYMBOLS, BATCH_SIZE)
        results = batch_util.batch_sample(circuit_batch, resolver_batch,
                                          n_samples, sim)
        tfq_histograms = []
        for r in results:
            tfq_histograms.append(
                np.histogram(r.dot(1 << np.arange(r.shape[-1] - 1, -1, -1)),
                             range=(0, 2**N_QUBITS),
                             bins=2**N_QUBITS)[0])
        cirq_histograms = []
        for circuit, resolver in zip(circuit_batch, resolver_batch):
            state = sim.simulate(circuit, resolver)
            r = _sample_helper(sim, state, len(circuit.all_qubits()), n_samples)
            cirq_histograms.append(
                np.histogram(r.dot(1 << np.arange(r.shape[-1] - 1, -1, -1)),
                             range=(0, 2**N_QUBITS),
                             bins=2**N_QUBITS)[0])
        for a, b in zip(tfq_histograms, cirq_histograms):
            self.assertLess(stats.entropy(a + 1e-8, b + 1e-8), 0.005)
        self.assertDTypeEqual(results, np.int32)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_empty_circuits(self, sim):
        """Test functions with empty circuits."""
        # Common preparation
        resolver_batch = [cirq.ParamResolver({}) for _ in range(BATCH_SIZE)]
        circuit_batch = [cirq.Circuit() for _ in range(BATCH_SIZE)]
        qubits = cirq.GridQubit.rect(1, N_QUBITS)
        ops = util.random_pauli_sums(qubits, PAULI_LENGTH, BATCH_SIZE)
        n_samples = [[1000] for _ in range(len(ops))]
        # If there is no op on a qubit, the expectation answer is -2.0
        true_expectation = (-2.0,)
        # (1) Test expectation
        results = batch_util.batch_calculate_expectation(
            circuit_batch, resolver_batch, [[x] for x in ops], sim)
        for _, _, result, _ in zip(circuit_batch, resolver_batch, results, ops):
            self.assertAllClose(true_expectation, result, rtol=1e-5, atol=1e-5)
        self.assertDTypeEqual(results, np.float32)
        # (2) Test sampled_expectation
        results = batch_util.batch_calculate_sampled_expectation(
            circuit_batch, resolver_batch, [[x] for x in ops], n_samples, sim)
        for _, _, result, _ in zip(circuit_batch, resolver_batch, results, ops):
            self.assertAllClose(true_expectation, result, rtol=1.0, atol=1e-1)
        self.assertDTypeEqual(results, np.float32)
        # (3) Test state
        results = batch_util.batch_calculate_state(circuit_batch,
                                                   resolver_batch, sim)
        for circuit, resolver, result in zip(circuit_batch, resolver_batch,
                                             results):
            r = _pad_state(sim, sim.simulate(circuit, resolver), 0)
            self.assertAllClose(r, result, rtol=1e-5, atol=1e-5)
        self.assertDTypeEqual(results, np.complex64)
        # (4) Test sampling
        n_samples = 2000 * (2**N_QUBITS)
        results = batch_util.batch_sample(circuit_batch, resolver_batch,
                                          n_samples, sim)
        for circuit, resolver, a in zip(circuit_batch, resolver_batch, results):
            state = sim.simulate(circuit, resolver)
            r = _sample_helper(sim, state, len(circuit.all_qubits()), n_samples)
            self.assertAllClose(r, a, atol=1e-5)
        self.assertDTypeEqual(results, np.int32)
# Entry point: delegate to the TensorFlow test runner when run directly.
if __name__ == '__main__':
    tf.test.main()
| 39.839416 | 80 | 0.600861 |
import numpy as np
import tensorflow as tf
from absl.testing import parameterized
from scipy import stats
import cirq
from tensorflow_quantum.core.ops import batch_util
from tensorflow_quantum.python import util
BATCH_SIZE = 12
N_QUBITS = 5
PAULI_LENGTH = 3
SYMBOLS = ['alpha', 'beta', 'gamma']
def _get_mixed_batch(qubits, symbols, size):
circuit1, resolver1 = util.random_circuit_resolver_batch(qubits, size // 2)
circuit2, resolver2 = util.random_symbol_circuit_resolver_batch(
qubits, symbols, size // 2)
return circuit1 + circuit2, resolver1 + resolver2
def _pad_state(sim, state, n):
if isinstance(sim, cirq.sim.sparse_simulator.Simulator):
state = state.final_state
if isinstance(sim, cirq.DensityMatrixSimulator):
state = state.final_density_matrix
return np.pad(state, (0, (1 << n) - state.shape[-1]),
'constant',
constant_values=-2)
def _expectation_helper(sim, circuit, params, op):
if isinstance(sim, cirq.sim.sparse_simulator.Simulator):
state = sim.simulate(circuit, params).final_state.astype(np.complex128)
return [
op.expectation_from_wavefunction(
state,
dict(
zip(sorted(circuit.all_qubits()),
(j for j in range(len(circuit.all_qubits())))))).real
]
if isinstance(sim, cirq.DensityMatrixSimulator):
state = sim.simulate(circuit, params).final_density_matrix
return [
sum(
x._expectation_from_density_matrix_no_validation(
state,
dict(
zip(sorted(circuit.all_qubits()), (
j
for j in range(len(circuit.all_qubits()))))))
for x in op)
]
return NotImplemented
def _sample_helper(sim, state, n_qubits, n_samples):
if isinstance(sim, cirq.sim.sparse_simulator.Simulator):
return cirq.sample_state_vector(state.final_state,
list(range(n_qubits)),
repetitions=n_samples)
if isinstance(sim, cirq.DensityMatrixSimulator):
return cirq.sample_density_matrix(state.final_density_matrix,
list(range(n_qubits)),
repetitions=n_samples)
return NotImplemented
class BatchUtilTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.parameters([{
'sim': cirq.DensityMatrixSimulator()
}, {
'sim': cirq.sim.sparse_simulator.Simulator()
}])
def test_batch_simulate_state(self, sim):
circuit_batch, resolver_batch = _get_mixed_batch(
cirq.GridQubit.rect(1, N_QUBITS), SYMBOLS, BATCH_SIZE)
results = batch_util.batch_calculate_state(circuit_batch,
resolver_batch, sim)
for circuit, resolver, result in zip(circuit_batch, resolver_batch,
results):
r = _pad_state(sim, sim.simulate(circuit, resolver), N_QUBITS)
self.assertAllClose(r, result, rtol=1e-5, atol=1e-5)
self.assertDTypeEqual(results, np.complex64)
@parameterized.parameters([{
'sim': cirq.DensityMatrixSimulator()
}, {
'sim': cirq.sim.sparse_simulator.Simulator()
}])
def test_batch_expectation(self, sim):
qubits = cirq.GridQubit.rect(1, N_QUBITS)
circuit_batch, resolver_batch = _get_mixed_batch(
qubits + [cirq.GridQubit(9, 9)], SYMBOLS, BATCH_SIZE)
ops = util.random_pauli_sums(qubits, PAULI_LENGTH, BATCH_SIZE)
results = batch_util.batch_calculate_expectation(
circuit_batch, resolver_batch, [[x] for x in ops], sim)
for circuit, resolver, result, op in zip(circuit_batch, resolver_batch,
results, ops):
r = _expectation_helper(sim, circuit, resolver, op)
self.assertAllClose(r, result, rtol=1e-5, atol=1e-5)
self.assertDTypeEqual(results, np.float32)
@parameterized.parameters([{
'sim': cirq.DensityMatrixSimulator()
}, {
'sim': cirq.sim.sparse_simulator.Simulator()
}])
def test_batch_sampled_expectation(self, sim):
qubits = cirq.GridQubit.rect(1, N_QUBITS)
circuit_batch, resolver_batch = _get_mixed_batch(
qubits + [cirq.GridQubit(9, 9)], SYMBOLS, BATCH_SIZE)
ops = util.random_pauli_sums(qubits, PAULI_LENGTH, BATCH_SIZE)
n_samples = [[1000] for _ in range(len(ops))]
results = batch_util.batch_calculate_sampled_expectation(
circuit_batch, resolver_batch, [[x] for x in ops], n_samples, sim)
for circuit, resolver, result, op in zip(circuit_batch, resolver_batch,
results, ops):
r = _expectation_helper(sim, circuit, resolver, op)
self.assertAllClose(r, result, rtol=1.0, atol=1e-1)
self.assertDTypeEqual(results, np.float32)
@parameterized.parameters([{
'sim': cirq.DensityMatrixSimulator()
}, {
'sim': cirq.sim.sparse_simulator.Simulator()
}])
def test_batch_sample_basic(self, sim):
n_samples = 1
n_qubits = 8
qubits = cirq.GridQubit.rect(1, n_qubits)
circuit = cirq.Circuit(*cirq.Z.on_each(*qubits[:n_qubits // 2]),
*cirq.X.on_each(*qubits[n_qubits // 2:]))
test_results = batch_util.batch_sample([circuit],
[cirq.ParamResolver({})],
n_samples, sim)
state = sim.simulate(circuit, cirq.ParamResolver({}))
expected_results = _sample_helper(sim, state, len(qubits), n_samples)
self.assertAllEqual(expected_results, test_results[0])
self.assertDTypeEqual(test_results, np.int32)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_batch_sample(self, sim):
        """Batched sampling distributions agree with per-circuit cirq sampling.

        Compares histograms of sampled bitstrings (as integers) via relative
        entropy rather than exact equality, since sampling is stochastic.
        """
        n_samples = 2000 * (2**N_QUBITS)
        circuit_batch, resolver_batch = _get_mixed_batch(
            cirq.GridQubit.rect(1, N_QUBITS), SYMBOLS, BATCH_SIZE)
        results = batch_util.batch_sample(circuit_batch, resolver_batch,
                                          n_samples, sim)
        tfq_histograms = []
        for r in results:
            # Dot with descending powers of two packs each bitstring into an
            # integer in [0, 2**N_QUBITS); histogram over all outcomes.
            tfq_histograms.append(
                np.histogram(r.dot(1 << np.arange(r.shape[-1] - 1, -1, -1)),
                             range=(0, 2**N_QUBITS),
                             bins=2**N_QUBITS)[0])
        cirq_histograms = []
        for circuit, resolver in zip(circuit_batch, resolver_batch):
            state = sim.simulate(circuit, resolver)
            r = _sample_helper(sim, state, len(circuit.all_qubits()), n_samples)
            cirq_histograms.append(
                np.histogram(r.dot(1 << np.arange(r.shape[-1] - 1, -1, -1)),
                             range=(0, 2**N_QUBITS),
                             bins=2**N_QUBITS)[0])
        for a, b in zip(tfq_histograms, cirq_histograms):
            # stats.entropy(a, b) is the KL divergence of a from b; the 1e-8
            # offset avoids log(0) on empty histogram bins.
            self.assertLess(stats.entropy(a + 1e-8, b + 1e-8), 0.005)
        self.assertDTypeEqual(results, np.int32)
    @parameterized.parameters([{
        'sim': cirq.DensityMatrixSimulator()
    }, {
        'sim': cirq.sim.sparse_simulator.Simulator()
    }])
    def test_empty_circuits(self, sim):
        """Every batch entry point tolerates a batch of empty circuits."""
        resolver_batch = [cirq.ParamResolver({}) for _ in range(BATCH_SIZE)]
        circuit_batch = [cirq.Circuit() for _ in range(BATCH_SIZE)]
        qubits = cirq.GridQubit.rect(1, N_QUBITS)
        ops = util.random_pauli_sums(qubits, PAULI_LENGTH, BATCH_SIZE)
        n_samples = [[1000] for _ in range(len(ops))]
        # NOTE(review): -2.0 appears to be the sentinel returned for empty
        # circuits (ops act on qubits absent from the circuit) — confirm
        # against batch_util's padding convention.
        true_expectation = (-2.0,)
        # Phase 1: analytic expectation values.
        results = batch_util.batch_calculate_expectation(
            circuit_batch, resolver_batch, [[x] for x in ops], sim)
        for _, _, result, _ in zip(circuit_batch, resolver_batch, results, ops):
            self.assertAllClose(true_expectation, result, rtol=1e-5, atol=1e-5)
        self.assertDTypeEqual(results, np.float32)
        # Phase 2: sampled expectation values (looser tolerance).
        results = batch_util.batch_calculate_sampled_expectation(
            circuit_batch, resolver_batch, [[x] for x in ops], n_samples, sim)
        for _, _, result, _ in zip(circuit_batch, resolver_batch, results, ops):
            self.assertAllClose(true_expectation, result, rtol=1.0, atol=1e-1)
        self.assertDTypeEqual(results, np.float32)
        # Phase 3: full state calculation, padded via _pad_state.
        results = batch_util.batch_calculate_state(circuit_batch,
                                                   resolver_batch, sim)
        for circuit, resolver, result in zip(circuit_batch, resolver_batch,
                                             results):
            r = _pad_state(sim, sim.simulate(circuit, resolver), 0)
            self.assertAllClose(r, result, rtol=1e-5, atol=1e-5)
        self.assertDTypeEqual(results, np.complex64)
        # Phase 4: bitstring sampling.
        n_samples = 2000 * (2**N_QUBITS)
        results = batch_util.batch_sample(circuit_batch, resolver_batch,
                                          n_samples, sim)
        for circuit, resolver, a in zip(circuit_batch, resolver_batch, results):
            state = sim.simulate(circuit, resolver)
            r = _sample_helper(sim, state, len(circuit.all_qubits()), n_samples)
            self.assertAllClose(r, a, atol=1e-5)
        self.assertDTypeEqual(results, np.int32)
# Run the test suite when executed directly as a script.
if __name__ == '__main__':
    tf.test.main()
| true | true |
f72fcf8e506902e300f338f0524ddabfe7e97eb9 | 13,031 | py | Python | watertap/core/zero_order_properties.py | jalving/watertap | a89bd61deaaca9c30402727545e8223a276c93e6 | [
"BSD-3-Clause-LBNL"
] | null | null | null | watertap/core/zero_order_properties.py | jalving/watertap | a89bd61deaaca9c30402727545e8223a276c93e6 | [
"BSD-3-Clause-LBNL"
] | null | null | null | watertap/core/zero_order_properties.py | jalving/watertap | a89bd61deaaca9c30402727545e8223a276c93e6 | [
"BSD-3-Clause-LBNL"
] | null | null | null | ###############################################################################
# WaterTAP Copyright (c) 2021, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory, Oak Ridge National
# Laboratory, National Renewable Energy Laboratory, and National Energy
# Technology Laboratory (subject to receipt of any required approvals from
# the U.S. Dept. of Energy). All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and license
# information, respectively. These files are also available online at the URL
# "https://github.com/watertap-org/watertap/"
#
###############################################################################
"""
This module contains the general purpose property package for zero-order
unit models. Zero-order models do not track temperature and pressure, or any
form of energy flow.
"""
from idaes.core import (EnergyBalanceType,
MaterialBalanceType,
MaterialFlowBasis,
PhysicalParameterBlock,
StateBlock,
StateBlockData,
declare_process_block_class)
from idaes.core.components import Solvent, Solute
from idaes.core.phases import LiquidPhase
from idaes.core.util.misc import add_object_reference
from idaes.core.util.initialization import fix_state_vars, revert_state_vars
import idaes.logger as idaeslog
import idaes.core.util.scaling as iscale
from idaes.core.util.exceptions import ConfigurationError
from pyomo.environ import (Expression,
Param,
PositiveReals,
units as pyunits,
Var)
from pyomo.common.config import ConfigValue
# Some more inforation about this module
__author__ = "Andrew Lee"
# Set up logger
_log = idaeslog.getLogger(__name__)
@declare_process_block_class("WaterParameterBlock")
class WaterParameterBlockData(PhysicalParameterBlock):
    """Property parameter block for the zero-order water property package.

    Defines the component and phase lists, base units, and constant
    (default) parameters. The solute list can come from an explicit
    ``solute_list`` config argument, from a WaterTAP database lookup, or
    both (in which case the explicit list is validated against the
    database).
    """
    CONFIG = PhysicalParameterBlock.CONFIG()
    CONFIG.declare('database', ConfigValue(
        description='An instance of a WaterTAP Database to use for parameters.'
    ))
    CONFIG.declare('water_source', ConfigValue(
        description=
        'Water source to use when looking up parameters from database.'))
    CONFIG.declare("solute_list", ConfigValue(
        domain=list,
        description="List of solute species of interest. If None, will use "
        "all species defined in the water_source provided."))
    def build(self):
        '''
        Callable method for Block construction.

        Resolves the solute list (config vs. database), registers the
        liquid phase, solvent and solute components, and sets default
        physical parameters and scaling factors.

        Raises:
            ConfigurationError: if neither solute_list nor database is given.
        '''
        super().build()
        self._state_block_class = WaterStateBlock
        self.Liq = LiquidPhase()
        self.H2O = Solvent()
        # Get component set from database if provided
        comp_set = None
        if self.config.database is not None:
            comp_set = self.config.database.get_solute_set(
                self.config.water_source)
        # Check definition of solute list
        solute_list = self.config.solute_list
        if solute_list is None:
            # No user-provided solute list, look up list from database
            if comp_set is None:
                # No solute list in database and none provided.
                raise ConfigurationError(
                    f"{self.name} no solute_list or database was defined. "
                    f"Users must provide at least one of these arguments.")
            else:
                solute_list = comp_set
        elif self.config.database is not None:
            # User provided custom list and database - check that all
            # components are supported; unknown components only log a note
            # rather than fail, so custom species remain usable.
            for j in solute_list:
                if j not in comp_set:
                    _log.info(f"{self.name} component {j} is not defined in "
                              f"the water_sources database file.")
        else:
            # User provided list but no database - assume they know what they
            # are doing
            pass
        # Register every solute as a component on the block.
        for j in solute_list:
            self.add_component(str(j), Solute())
        # Define default value for mass density of solution
        self.dens_mass_default = 1000*pyunits.kg/pyunits.m**3
        # Define default value for dynamic viscosity of solution
        self.visc_d_default = 0.001*pyunits.kg/pyunits.m/pyunits.s
        # ---------------------------------------------------------------------
        # Set default scaling factors
        self.default_scaling_factor = {
            ("flow_vol"): 1e3,
            ("conc_mass_comp"): 1e2}
    @classmethod
    def define_metadata(cls, obj):
        # Declare base SI units and which properties this package supports;
        # 'method': None means the property is constructed in build(), the
        # string names are lazy constructors on the state block.
        obj.add_default_units({
            'time': pyunits.s,
            'length': pyunits.m,
            'mass': pyunits.kg,
            'amount': pyunits.mol,
            'temperature': pyunits.K,
        })
        obj.add_properties(
            {'flow_mass_comp': {'method': None},
             'flow_vol': {'method': '_flow_vol'},
             'conc_mass_comp': {'method': '_conc_mass_comp'},
             'dens_mass': {'method': '_dens_mass'},
             'visc_d': {'method': '_visc_d'}})
class _WaterStateBlock(StateBlock):
    """
    This Class contains methods which should be applied to Property Blocks as a
    whole, rather than individual elements of indexed Property Blocks.
    """
    def initialize(blk,
                   state_args=None,
                   state_vars_fixed=False,
                   hold_state=False,
                   outlvl=idaeslog.NOTSET,
                   solver=None,
                   optarg=None):
        '''
        Initialization routine for property package.

        Keyword Arguments:
            state_args : Dictionary with initial guesses for the state vars
                         chosen. Note that if this method is triggered
                         through the control volume, and if initial guesses
                         were not provided at the unit model level, the
                         control volume passes the inlet values as initial
                         guess. The keys for the state_args dictionary are:
                         flow_mol_comp : value at which to initialize component
                                         flows (default=None)
                         pressure : value at which to initialize pressure
                                    (default=None)
                         temperature : value at which to initialize temperature
                                       (default=None)
            outlvl : sets output level of initialization routine
            state_vars_fixed : Flag to denote if state vars have already been
                               fixed.
                               - True - states have already been fixed and
                                        initialization does not need to worry
                                        about fixing and unfixing variables.
                               - False - states have not been fixed. The state
                                         block will deal with fixing/unfixing.
            optarg : solver options dictionary object (default=None, use
                     default solver options)
            solver : str indicating which solver to use during
                     initialization (default=None, use default solver)
            hold_state : flag indicating whether the initialization routine
                         should unfix any state variables fixed during
                         initialization (default=False).
                         - True - state variables are not unfixed, and a dict
                                  is returned containing flags for which
                                  states were fixed during initialization.
                         - False - state variables are unfixed after
                                   initialization by calling the
                                   release_state method.

        Returns:
            If hold_state is True, returns a dict containing flags for
            which states were fixed during initialization.
        '''
        # For now, there are no constraints in the property package, so only
        # fix state variables if required
        init_log = idaeslog.getInitLogger(blk.name, outlvl, tag="properties")
        init_log.info('Initialization Complete.')
        if hold_state is True:
            flags = fix_state_vars(blk, state_args)
            return flags
        else:
            return
    def release_state(blk, flags, outlvl=idaeslog.NOTSET):
        '''
        Method to release state variables fixed during initialization.

        Keyword Arguments:
            flags : dict containing information of which state variables
                    were fixed during initialization, and should now be
                    unfixed. This dict is returned by initialize if
                    hold_state=True.
            outlvl : sets output level of logging
        '''
        init_log = idaeslog.getInitLogger(blk.name, outlvl, tag="properties")
        # Nothing to do if initialize() did not fix any variables.
        if flags is None:
            return
        # Unfix state variables
        revert_state_vars(blk, flags)
        init_log.info('State Released.')
@declare_process_block_class("WaterStateBlock",
                             block_class=_WaterStateBlock)
class WaterStateBlockData(StateBlockData):
    """
    General purpose StateBlock for Zero-Order unit models.

    The only state variables are component mass flowrates; volumetric flow
    and mass concentrations are derived Expressions, and density/viscosity
    are mutable Params taken from the parameter block defaults.
    """
    def build(self):
        super().build()
        # Create state variables
        self.flow_mass_comp = Var(self.component_list,
                                  initialize=1,
                                  domain=PositiveReals,
                                  doc='Mass flowrate of each component',
                                  units=pyunits.kg/pyunits.s)
    # -------------------------------------------------------------------------
    # Other properties
    def _conc_mass_comp(self):
        # conc_mass_comp[j] = mass fraction of j * bulk density.
        # NOTE(review): rule_cmc mixes `blk` and `self`; both refer to the
        # same block here so behaviour is unaffected, but using `blk`
        # consistently would be clearer.
        def rule_cmc(blk, j):
            return (blk.flow_mass_comp[j] /
                    sum(self.flow_mass_comp[k] for k in self.component_list) *
                    blk.dens_mass)
        self.conc_mass_comp = Expression(self.component_list,
                                         rule=rule_cmc)
    def _dens_mass(self):
        # Mutable Param so users can override the package default in place.
        self.dens_mass = Param(initialize=self.params.dens_mass_default,
                               units=pyunits.kg/pyunits.m**3,
                               mutable=True,
                               doc="Mass density of flow")
    def _flow_vol(self):
        # Total volumetric flow = total mass flow / density.
        self.flow_vol = Expression(
            expr=sum(self.flow_mass_comp[j] for j in self.component_list) /
            self.dens_mass)
    def _visc_d(self):
        self.visc_d = Param(initialize=self.params.visc_d_default,
                            units=pyunits.kg/pyunits.m/pyunits.s,
                            mutable=True,
                            doc="Dynamic viscosity of solution")
    def get_material_flow_terms(blk, p, j):
        return blk.flow_mass_comp[j]
    def get_enthalpy_flow_terms(blk, p):
        # Zero-order models do not track energy flows.
        raise NotImplementedError
    def get_material_density_terms(blk, p, j):
        return blk.conc_mass_comp[j]
    def get_energy_density_terms(blk, p):
        # Zero-order models do not track energy holdup.
        raise NotImplementedError
    def default_material_balance_type(self):
        return MaterialBalanceType.componentTotal
    def default_energy_balance_type(self):
        return EnergyBalanceType.none
    def define_state_vars(blk):
        return {"flow_mass_comp": blk.flow_mass_comp}
    def define_display_vars(blk):
        return {"Volumetric Flowrate": blk.flow_vol,
                "Mass Concentration": blk.conc_mass_comp}
    def get_material_flow_basis(blk):
        return MaterialFlowBasis.mass
    def calculate_scaling_factors(self):
        # Get default scale factors and do calculations from base classes
        super().calculate_scaling_factors()
        d_sf_Q = self.params.default_scaling_factor["flow_vol"]
        d_sf_c = self.params.default_scaling_factor["conc_mass_comp"]
        # Mass flow scale = flow scale * concentration scale; only applied
        # where the user has not already set a factor.
        for j, v in self.flow_mass_comp.items():
            if iscale.get_scaling_factor(v) is None:
                iscale.set_scaling_factor(v, d_sf_Q*d_sf_c)
        if self.is_property_constructed("flow_vol"):
            if iscale.get_scaling_factor(self.flow_vol) is None:
                iscale.set_scaling_factor(self.flow_vol, d_sf_Q)
        if self.is_property_constructed("conc_mass_comp"):
            for j, v in self.conc_mass_comp.items():
                sf_c = iscale.get_scaling_factor(self.conc_mass_comp[j])
                if sf_c is None:
                    # Prefer a per-component default if one was declared.
                    try:
                        sf_c = self.params.default_scaling_factor[
                            ("conc_mass_comp", j)]
                    except KeyError:
                        sf_c = d_sf_c
                    iscale.set_scaling_factor(self.conc_mass_comp[j], sf_c)
| 39.728659 | 81 | 0.578927 | :
raise NotImplementedError
def get_material_density_terms(blk, p, j):
return blk.conc_mass_comp[j]
def get_energy_density_terms(blk, p):
raise NotImplementedError
def default_material_balance_type(self):
return MaterialBalanceType.componentTotal
def default_energy_balance_type(self):
return EnergyBalanceType.none
def define_state_vars(blk):
return {"flow_mass_comp": blk.flow_mass_comp}
def define_display_vars(blk):
return {"Volumetric Flowrate": blk.flow_vol,
"Mass Concentration": blk.conc_mass_comp}
def get_material_flow_basis(blk):
return MaterialFlowBasis.mass
def calculate_scaling_factors(self):
super().calculate_scaling_factors()
d_sf_Q = self.params.default_scaling_factor["flow_vol"]
d_sf_c = self.params.default_scaling_factor["conc_mass_comp"]
for j, v in self.flow_mass_comp.items():
if iscale.get_scaling_factor(v) is None:
iscale.set_scaling_factor(v, d_sf_Q*d_sf_c)
if self.is_property_constructed("flow_vol"):
if iscale.get_scaling_factor(self.flow_vol) is None:
iscale.set_scaling_factor(self.flow_vol, d_sf_Q)
if self.is_property_constructed("conc_mass_comp"):
for j, v in self.conc_mass_comp.items():
sf_c = iscale.get_scaling_factor(self.conc_mass_comp[j])
if sf_c is None:
try:
sf_c = self.params.default_scaling_factor[
("conc_mass_comp", j)]
except KeyError:
sf_c = d_sf_c
iscale.set_scaling_factor(self.conc_mass_comp[j], sf_c)
| true | true |
f72fcfcd3dc525e47916c5740040252f6d957d98 | 5,279 | py | Python | py/jupyterlite/src/jupyterlite/config.py | marimeireles/jupyterlite | 65c9304cf89d311b8a48f227a0cbb2b7f50cf4bd | [
"BSD-3-Clause"
] | null | null | null | py/jupyterlite/src/jupyterlite/config.py | marimeireles/jupyterlite | 65c9304cf89d311b8a48f227a0cbb2b7f50cf4bd | [
"BSD-3-Clause"
] | null | null | null | py/jupyterlite/src/jupyterlite/config.py | marimeireles/jupyterlite | 65c9304cf89d311b8a48f227a0cbb2b7f50cf4bd | [
"BSD-3-Clause"
] | null | null | null | """an observable configuration object for the JupyterLite lifecycle
.. todo::
Move to a canonical JSON schema?
"""
import os
from pathlib import Path
from typing import Optional as _Optional
from typing import Text as _Text
from typing import Tuple as _Tuple
from traitlets import CInt, Tuple, Unicode, default
from traitlets.config import LoggingConfigurable
from . import constants as C
from .trait_types import CPath, TypedTuple
class LiteBuildConfig(LoggingConfigurable):
    """The description of a JupyterLite build.

    This is most likely to be configured:

    - from environment variables
    - in a ``pyproject.toml``
    - from the command line

    With direct instantiation a distant last place.

    This is conceptually similar in scale to ``jupyter_server_config.json``,
    and will piggy-back off of the
    ``{sys.prefix}/share/jupyter_{notebook,server}_config.d/`` loader paths.
    """
    disable_addons: _Tuple[_Text] = TypedTuple(
        Unicode(),
        help=("skip loading `entry_point` for these addons. TODO: should be a dict"),
    ).tag(config=True)
    apps: _Tuple[_Text] = TypedTuple(
        Unicode(),
        help=(
            f"""the Lite apps: currently {C.JUPYTERLITE_APPS}. """
            f"""Required: {C.JUPYTERLITE_APPS_REQUIRED}"""
        ),
    ).tag(config=True)
    app_archive: Path = CPath(
        help=("The app archive to use. env: JUPYTERLITE_APP_ARCHIVE")
    ).tag(config=True)
    lite_dir: Path = CPath(
        help=("The root folder of a JupyterLite project. env: JUPYTERLITE_DIR")
    ).tag(config=True)
    output_dir: Path = CPath(
        help=("Where to build the JupyterLite site. env: JUPYTERLITE_OUTPUT_DIR")
    ).tag(config=True)
    output_archive: Path = CPath(
        help=("Archive to create. env: JUPYTERLITE_OUTPUT_ARCHIVE")
    ).tag(config=True)
    files: _Tuple[Path] = TypedTuple(
        CPath(), help="Files to add and index as Jupyter Contents"
    ).tag(config=True)
    overrides: _Tuple[_Text] = TypedTuple(
        CPath(), help=("Specific overrides.json to include")
    ).tag(config=True)
    # serving
    port: int = CInt(
        help=(
            "[serve] the port to (insecurely) expose on http://127.0.0.1."
            " env: JUPYTERLITE_PORT"
        )
    ).tag(config=True)
    base_url: str = Unicode(
        help=("[serve] the prefix to use." " env: JUPYTERLITE_BASE_URL")
    ).tag(config=True)
    # patterns
    ignore_files: _Tuple[_Text] = Tuple(
        help="Path patterns that should never be included"
    ).tag(config=True)
    source_date_epoch: _Optional[int] = CInt(
        allow_none=True,
        min=1,
        help="Trigger reproducible builds, clamping timestamps to this value",
    ).tag(config=True)

    @default("apps")
    def _default_apps(self):
        """All known Lite apps."""
        return C.JUPYTERLITE_APPS

    @default("disable_addons")
    def _default_disable_addons(self):
        """the addons that are disabled by default."""
        return []

    @default("output_dir")
    def _default_output_dir(self):
        """Environment variable, falling back to ``<lite_dir>/<default>``."""
        return Path(
            os.environ.get("JUPYTERLITE_OUTPUT_DIR")
            or self.lite_dir / C.DEFAULT_OUTPUT_DIR
        )

    @default("lite_dir")
    def _default_lite_dir(self):
        """Environment variable, falling back to the current directory."""
        return Path(os.environ.get("JUPYTERLITE_DIR", Path.cwd()))

    @default("files")
    def _default_files(self):
        """``<lite_dir>/files`` if it exists, else nothing."""
        lite_files = self.lite_dir / "files"
        if lite_files.is_dir():
            return [lite_files]
        return []

    @default("overrides")
    def _default_overrides(self):
        """Collect overrides.json from the project root and each app dir."""
        all_overrides = []
        for app in [None, *self.apps]:
            app_dir = self.lite_dir / app if app else self.lite_dir
            overrides_json = app_dir / C.OVERRIDES_JSON
            if overrides_json.exists():
                all_overrides += [overrides_json]
        return all_overrides

    @default("ignore_files")
    def _default_ignore_files(self):
        """Regular expressions for paths that should never be included.

        Raw strings are used so ``\\.`` is a literal regex escape and not an
        (invalid) Python string escape, which raises a SyntaxWarning on
        Python 3.12+.
        """
        return [
            r".*\.pyc",
            r"/\.git/",
            r"/\.gitignore",
            r"/\.ipynb_checkpoints/",
            "/build/",
            "/lib/",
            "/dist/",
            ".*doit.db",
            "/node_modules/",
            "/envs/",
            "/venvs/",
            r"/\.env",
            C.JUPYTERLITE_JSON.replace(".", "\\."),
            C.JUPYTERLITE_IPYNB.replace(".", "\\."),
            "untitled.*",
            "Untitled.*",
            f"/{self.output_dir.name}/",
        ]

    @default("app_archive")
    def _default_app_archive(self):
        """Environment variable, falling back to the packaged archive."""
        return Path(os.environ.get("JUPYTERLITE_APP_ARCHIVE") or C.DEFAULT_APP_ARCHIVE)

    @default("output_archive")
    def _default_output_archive(self):
        """Environment variable, falling back to ``<name>-jupyterlite.tgz``."""
        return Path(
            os.environ.get("JUPYTERLITE_OUTPUT_ARCHIVE")
            or self.output_dir / f"{self.lite_dir.name}-jupyterlite.tgz"
        )

    @default("source_date_epoch")
    def _default_source_date_epoch(self):
        """SOURCE_DATE_EPOCH from the environment, or None if unset."""
        if C.SOURCE_DATE_EPOCH not in os.environ:
            return None
        sde = int(os.environ[C.SOURCE_DATE_EPOCH])
        return sde

    @default("port")
    def _default_port(self):
        """Environment variable, falling back to 8000."""
        return int(os.environ.get("JUPYTERLITE_PORT", 8000))

    @default("base_url")
    def _default_base_url(self):
        """Environment variable, falling back to ``/``."""
        return os.environ.get("JUPYTERLITE_BASE_URL", "/")
| 29.327778 | 87 | 0.614321 | import os
from pathlib import Path
from typing import Optional as _Optional
from typing import Text as _Text
from typing import Tuple as _Tuple
from traitlets import CInt, Tuple, Unicode, default
from traitlets.config import LoggingConfigurable
from . import constants as C
from .trait_types import CPath, TypedTuple
class LiteBuildConfig(LoggingConfigurable):
disable_addons: _Tuple[_Text] = TypedTuple(
Unicode(),
help=("skip loading `entry_point` for these addons. TODO: should be a dict"),
).tag(config=True)
apps: _Tuple[_Text] = TypedTuple(
Unicode(),
help=(
f"""the Lite apps: currently {C.JUPYTERLITE_APPS}. """
f"""Required: {C.JUPYTERLITE_APPS_REQUIRED}"""
),
).tag(config=True)
app_archive: Path = CPath(
help=("The app archive to use. env: JUPYTERLITE_APP_ARCHIVE")
).tag(config=True)
lite_dir: Path = CPath(
help=("The root folder of a JupyterLite project. env: JUPYTERLITE_DIR")
).tag(config=True)
output_dir: Path = CPath(
help=("Where to build the JupyterLite site. env: JUPYTERLITE_OUTPUT_DIR")
).tag(config=True)
output_archive: Path = CPath(
help=("Archive to create. env: JUPYTERLITE_OUTPUT_ARCHIVE")
).tag(config=True)
files: _Tuple[Path] = TypedTuple(
CPath(), help="Files to add and index as Jupyter Contents"
).tag(config=True)
overrides: _Tuple[_Text] = TypedTuple(
CPath(), help=("Specific overrides.json to include")
).tag(config=True)
port: int = CInt(
help=(
"[serve] the port to (insecurely) expose on http://127.0.0.1."
" env: JUPYTERLITE_PORT"
)
).tag(config=True)
base_url: str = Unicode(
help=("[serve] the prefix to use." " env: JUPYTERLITE_BASE_URL")
).tag(config=True)
ignore_files: _Tuple[_Text] = Tuple(
help="Path patterns that should never be included"
).tag(config=True)
source_date_epoch: _Optional[int] = CInt(
allow_none=True,
min=1,
help="Trigger reproducible builds, clamping timestamps to this value",
).tag(config=True)
@default("apps")
def _default_apps(self):
return C.JUPYTERLITE_APPS
@default("disable_addons")
def _default_disable_addons(self):
return []
@default("output_dir")
def _default_output_dir(self):
return Path(
os.environ.get("JUPYTERLITE_OUTPUT_DIR")
or self.lite_dir / C.DEFAULT_OUTPUT_DIR
)
@default("lite_dir")
def _default_lite_dir(self):
return Path(os.environ.get("JUPYTERLITE_DIR", Path.cwd()))
@default("files")
def _default_files(self):
lite_files = self.lite_dir / "files"
if lite_files.is_dir():
return [lite_files]
return []
@default("overrides")
def _default_overrides(self):
all_overrides = []
for app in [None, *self.apps]:
app_dir = self.lite_dir / app if app else self.lite_dir
overrides_json = app_dir / C.OVERRIDES_JSON
if overrides_json.exists():
all_overrides += [overrides_json]
return all_overrides
@default("ignore_files")
def _default_ignore_files(self):
return [
".*\.pyc",
"/\.git/",
"/\.gitignore",
"/\.ipynb_checkpoints/",
"/build/",
"/lib/",
"/dist/",
".*doit.db",
"/node_modules/",
"/envs/",
"/venvs/",
"/\.env",
C.JUPYTERLITE_JSON.replace(".", "\\."),
C.JUPYTERLITE_IPYNB.replace(".", "\\."),
"untitled.*",
"Untitled.*",
f"/{self.output_dir.name}/",
]
@default("app_archive")
def _default_app_archive(self):
return Path(os.environ.get("JUPYTERLITE_APP_ARCHIVE") or C.DEFAULT_APP_ARCHIVE)
@default("output_archive")
def _default_output_archive(self):
return Path(
os.environ.get("JUPYTERLITE_OUTPUT_ARCHIVE")
or self.output_dir / f"{self.lite_dir.name}-jupyterlite.tgz"
)
@default("source_date_epoch")
def _default_source_date_epoch(self):
if C.SOURCE_DATE_EPOCH not in os.environ:
return None
sde = int(os.environ[C.SOURCE_DATE_EPOCH])
return sde
@default("port")
def _default_port(self):
return int(os.environ.get("JUPYTERLITE_PORT", 8000))
@default("base_url")
def _default_base_url(self):
return os.environ.get("JUPYTERLITE_BASE_URL", "/")
| true | true |
f72fcfd6c1a73e3ebdb2254eb93485dc7e9e2ac2 | 10,495 | py | Python | tests/test_expressions.py | thorag76/mappyfile | 51ae914cb6282549b73cde684cbc54e213c74d4a | [
"MIT"
] | 48 | 2017-02-07T23:37:37.000Z | 2021-12-28T12:56:37.000Z | tests/test_expressions.py | thorag76/mappyfile | 51ae914cb6282549b73cde684cbc54e213c74d4a | [
"MIT"
] | 135 | 2017-03-16T08:54:59.000Z | 2022-03-30T20:00:22.000Z | tests/test_expressions.py | thorag76/mappyfile | 51ae914cb6282549b73cde684cbc54e213c74d4a | [
"MIT"
] | 23 | 2017-01-31T08:46:48.000Z | 2021-07-08T15:28:49.000Z | # -*- coding: utf-8 -*-
import logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
def output(s):
    """Parse a mapfile string, transform it to a dict, and pretty-print it.

    Returns the single-line pretty-printed mapfile text, so callers can
    compare the round-tripped result against an expected string.
    """
    p = Parser()
    m = MapfileToDict(include_position=True)
    # Log the calling test's name for easier debugging; see
    # https://stackoverflow.com/questions/900392/getting-the-caller-function-name-inside-another-function-in-python
    logging.info(inspect.stack()[1][3])
    ast = p.parse(s)
    logging.debug(ast.pretty())
    d = m.transform(ast)
    logging.debug(json.dumps(d, indent=4))
    # newlinechar=" " collapses the output onto one line for easy asserts.
    pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
    s = pp.pprint(d)
    logging.debug(s)
    return s
def check_result(s):
    """Round-trip *s* through the parser and assert it comes back unchanged.

    On mismatch, logs both the input and the round-tripped text before
    re-raising the AssertionError.

    Bug fix: the original called ``output(s)`` inside the ``try`` block, so
    an AssertionError raised *inside* output() would reach the except clause
    with ``s2`` unbound, masking the real failure with a NameError.
    """
    s2 = output(s)
    try:
        assert s == s2
    except AssertionError:
        logging.info(s)
        logging.info(s2)
        raise
def test_class_expression1():
    """A bare attribute binding inside TEXT survives a round trip."""
    mapfile = '''
    CLASS
    TEXT ([area])
    END
    '''
    expected = "CLASS TEXT ([area]) END"
    assert output(mapfile) == expected
def test_class_expression2():
r"""
shp2img -m C:\Temp\msautotest\query\text.tmp.map -l text_test002 -o c:\temp\tmp_onl0lk.png
"""
s = '''
CLASS
TEXT ("[area]")
END
'''
exp = 'CLASS TEXT ("[area]") END'
assert(output(s) == exp)
def test_complex_class_expression():
s = '''
CLASS
TEXT ("Area is: " + tostring([area],"%.2f"))
END
'''
exp = '''CLASS TEXT ("Area is: " + (tostring([area],"%.2f"))) END'''
assert(output(s) == exp)
def test_or_expressions():
"""
See http://www.mapserver.org/mapfile/expressions.html#expressions
"""
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" OR "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) OR ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" || "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) OR ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
def test_and_expressions():
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" AND "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) AND ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" && "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) AND ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
def test_not_expressions():
s = '''
CLASS
EXPRESSION NOT("[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION NOT ( "[style_class]" = "20" ) END'
assert(output(s) == exp)
s = '''
CLASS
EXPRESSION !("[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION NOT ( "[style_class]" = "20" ) END'
assert(output(s) == exp)
def test_runtime_expression():
s = """
CLASS
EXPRESSION ( [EPPL_Q100_] = %eppl% )
END
"""
exp = "CLASS EXPRESSION ( [EPPL_Q100_] = %eppl% ) END"
# print(output(s))
assert(output(s) == exp)
def test_ne_comparison():
"""
IS NOT is not valid
NE (Not Equals) should be used instead
"""
s = """
CLASS
# EXPRESSION ( "[building]" IS NOT NULL) # incorrect syntax
EXPRESSION ( "[building]" NE NULL)
END
"""
exp = 'CLASS EXPRESSION ( "[building]" NE NULL ) END'
assert(output(s) == exp)
def test_eq_comparison():
"""
Case is not changed for comparison (EQ/eq stay the same)
Uses Earley
"""
s = """
CLASS
EXPRESSION ( "[building]" eq NULL)
END
"""
exp = 'CLASS EXPRESSION ( "[building]" eq NULL ) END'
# print(output(s))
assert(output(s) == exp)
def test_expression():
"""
Addressed in issue #27, now parses successfully.
"""
s = """
CLASS
EXPRESSION ('[construct]' ~* /Br.*$/)
STYLE
ANGLE 360
END
END
"""
exp = "CLASS EXPRESSION ( '[construct]' ~* /Br.*$/ ) STYLE ANGLE 360 END END"
assert(output(s) == exp)
def test_list_expression():
"""
See issue #27
"""
s = """
CLASS
EXPRESSION /NS_Bahn|NS_BahnAuto/
END
"""
exp = "CLASS EXPRESSION /NS_Bahn|NS_BahnAuto/ END"
assert(output(s) == exp)
def test_numerical_operator_ge_expression():
s = """
CLASS
EXPRESSION ([power] ge 10000)
END
"""
exp = "CLASS EXPRESSION ( [power] ge 10000 ) END"
assert(output(s) == exp)
def test_numerical_operator_gt_expression():
s = """
CLASS
EXPRESSION ([power] gt 10000)
END
"""
exp = "CLASS EXPRESSION ( [power] gt 10000 ) END"
assert(output(s) == exp)
def test_numerical_operator_le_expression():
s = """
CLASS
EXPRESSION ([power] le 100)
END
"""
exp = "CLASS EXPRESSION ( [power] le 100 ) END"
assert(output(s) == exp)
def test_numerical_operator_lt_expression():
s = """
CLASS
EXPRESSION ([power] lt 100)
END
"""
exp = "CLASS EXPRESSION ( [power] lt 100 ) END"
assert(output(s) == exp)
def test_divide():
"""
Not sure if these should be in brackets or not
http://mapserver.org/mapfile/expressions.html
Implies with brackets will return a boolean value
and without will return a numeric value
"""
s = """
CLASS
EXPRESSION ([field1] / [field2])
END
"""
exp = "CLASS EXPRESSION ([field1] / [field2]) END"
assert(output(s) == exp)
def test_multiply():
s = """
CLASS
EXPRESSION ([field1] * [field2])
END
"""
exp = "CLASS EXPRESSION ([field1] * [field2]) END"
assert(output(s) == exp)
def test_negation():
"""
TODO - check the exact syntax for this
"""
s = """
CLASS
EXPRESSION (-[field1])
END
"""
exp = "CLASS EXPRESSION (-[field1]) END"
assert(output(s) == exp)
def test_pointless_plus():
    """A redundant unary plus is dropped by the printer (based on test_negation)."""
    mapfile = """
    CLASS
    EXPRESSION (+[field1])
    END
    """
    expected = "CLASS EXPRESSION ([field1]) END"
    assert output(mapfile) == expected
def test_power():
s = """
CLASS
EXPRESSION ([field1] ^ [field2])
END
"""
exp = "CLASS EXPRESSION ([field1] ^ [field2]) END"
assert(output(s) == exp)
def test_divide_expression():
"""
http://mapserver.org/mapfile/expressions.html
Also - * and ^
"""
s = """
CLASS
EXPRESSION ([field1] / [field2] > 0.1)
END
"""
exp = "CLASS EXPRESSION ( [field1] / [field2] > 0.1 ) END"
assert(output(s) == exp)
def test_modulo_expression():
"""
Not currently documented at http://mapserver.org/mapfile/expressions.html
"""
s = """
CLASS
EXPRESSION ( ([height] % 50) = 0 )
END
"""
exp = "CLASS EXPRESSION ( ( [height] % 50 ) = 0 ) END"
assert(output(s) == exp)
def test_escaped_string():
"""
http://mapserver.org/mapfile/expressions.html#quotes-escaping-in-strings
Starting with MapServer 6.0 you don't need to escape single quotes within double quoted strings
and you don't need to escape double quotes within single quoted strings
"""
s = r"""
CLASS
EXPRESSION "National \"hero\" statue"
END
"""
exp = """CLASS EXPRESSION 'National \\"hero\\" statue' END"""
assert(output(s) == exp)
def test_list_expression_alt():
"""
See issue #38
http://mapserver.org/mapfile/expressions.html#list-expressions
These expressions are much more performant in MapServer
List expressions do not support quote escaping, or attribute values that contain a comma in them.
To activate them enclose a comma separated list of values between {}, without adding quotes
or extra spaces.
"""
s = """
CLASS
EXPRESSION {2_Klass,Rte2etr}
END
"""
exp = "CLASS EXPRESSION {2_Klass,Rte2etr} END"
assert(output(s) == exp)
s = """
CLASS
EXPRESSION {2_Klass,class with space}
END
"""
exp = "CLASS EXPRESSION {2_Klass,class with space} END"
assert(output(s) == exp)
def test_class_expression_oddname():
s = '''
CLASS
TEXT ([area:ian])
END
'''
exp = "CLASS TEXT ([area:ian]) END"
assert(output(s) == exp)
def test_class_not_expression_brackets():
"""
See issue #85 - coding of NOT logical expressions #85
Each expression should be bracketed independently and any NOT
clause should be outside the brackets
"""
s = '''
CLASS
EXPRESSION (("[TIME]" eq 'NOW') AND NOT ("[TYPE]" ~ "(something|completely|different)"))
END
'''
exp = '''CLASS EXPRESSION ( ( "[TIME]" eq 'NOW' ) AND NOT ( "[TYPE]" ~ "(something|completely|different)" ) ) END'''
print(output(s))
assert(output(s) == exp)
def test_class_not_expression_no_brackets():
"""
See issue #85 - coding of NOT logical expressions #85
This parses successfully in MapServer but not in mappyfile
"""
s = '''
CLASS
EXPRESSION ("[TIME]" eq 'NOW' AND NOT "[TYPE]" ~ "(something|completely|different)")
END
'''
exp = '''CLASS EXPRESSION ( ( "[TIME]" eq 'NOW' ) AND NOT ( "[TYPE]" ~ "(something|completely|different)" ) ) END'''
assert(output(s) == exp)
def test_unquoted_unicode_string():
"""
See pull request #92 - French unquoted string
"""
s = '''
CLASS
EXPRESSION {Aérodrome,Aéroport,Héliport,Base spatiale}
END
'''
exp = u'''CLASS EXPRESSION {Aérodrome,Aéroport,Héliport,Base spatiale} END'''
assert(output(s) == exp)
def test_list_with_apostrophe():
"""
See https://github.com/geographika/mappyfile/issues/120
"""
s = '''
CLASS
EXPRESSION {bla,d'apostrophe}
END
'''
exp = u'''CLASS EXPRESSION {bla,d'apostrophe} END'''
assert(output(s) == exp)
def run_tests():
r"""
Need to comment out the following line in C:\VirtualEnvs\mappyfile\Lib\site-packages\pep8.py
#stdin_get_value = sys.stdin.read
Or get AttributeError: '_ReplInput' object has no attribute 'read'
"""
pytest.main(["tests/test_expressions.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
test_list_with_apostrophe()
# run_tests()
print("Done!")
| 22.765727 | 120 | 0.577608 |
import logging
import json
import inspect
import pytest
from mappyfile.parser import Parser
from mappyfile.pprint import PrettyPrinter
from mappyfile.transformer import MapfileToDict
def output(s):
    """Round-trip helper: parse mapfile snippet *s*, transform the AST to a
    dict, and pretty-print it back as a single-line string (newlines are
    collapsed to spaces, single quotes used for strings)."""
    p = Parser()
    m = MapfileToDict(include_position=True)
    # log the name of the calling test function for easier debugging
    logging.info(inspect.stack()[1][3])
    ast = p.parse(s)
    logging.debug(ast.pretty())
    d = m.transform(ast)
    logging.debug(json.dumps(d, indent=4))
    pp = PrettyPrinter(indent=0, newlinechar=" ", quote="'")
    s = pp.pprint(d)
    logging.debug(s)
    return s
def check_result(s):
    """Assert that *s* is unchanged by a parse/pretty-print round trip,
    logging both versions before re-raising on mismatch."""
    try:
        formatted = output(s)
        assert s == formatted
    except AssertionError:
        logging.info(s)
        logging.info(formatted)
        raise
def test_class_expression1():
s = '''
CLASS
TEXT ([area])
END
'''
exp = "CLASS TEXT ([area]) END"
assert(output(s) == exp)
def test_class_expression2():
s = '''
CLASS
TEXT ("[area]")
END
'''
exp = 'CLASS TEXT ("[area]") END'
assert(output(s) == exp)
def test_complex_class_expression():
s = '''
CLASS
TEXT ("Area is: " + tostring([area],"%.2f"))
END
'''
exp = '''CLASS TEXT ("Area is: " + (tostring([area],"%.2f"))) END'''
assert(output(s) == exp)
def test_or_expressions():
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" OR "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) OR ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" || "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) OR ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
def test_and_expressions():
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" AND "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) AND ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
s = '''
CLASS
EXPRESSION ("[style_class]" = "10" && "[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION ( ( "[style_class]" = "10" ) AND ( "[style_class]" = "20" ) ) END'
assert(output(s) == exp)
def test_not_expressions():
s = '''
CLASS
EXPRESSION NOT("[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION NOT ( "[style_class]" = "20" ) END'
assert(output(s) == exp)
s = '''
CLASS
EXPRESSION !("[style_class]" = "20")
END
'''
exp = 'CLASS EXPRESSION NOT ( "[style_class]" = "20" ) END'
assert(output(s) == exp)
def test_runtime_expression():
s = """
CLASS
EXPRESSION ( [EPPL_Q100_] = %eppl% )
END
"""
exp = "CLASS EXPRESSION ( [EPPL_Q100_] = %eppl% ) END"
# print(output(s))
assert(output(s) == exp)
def test_ne_comparison():
s = """
CLASS
# EXPRESSION ( "[building]" IS NOT NULL) # incorrect syntax
EXPRESSION ( "[building]" NE NULL)
END
"""
exp = 'CLASS EXPRESSION ( "[building]" NE NULL ) END'
assert(output(s) == exp)
def test_eq_comparison():
s = """
CLASS
EXPRESSION ( "[building]" eq NULL)
END
"""
exp = 'CLASS EXPRESSION ( "[building]" eq NULL ) END'
# print(output(s))
assert(output(s) == exp)
def test_expression():
s = """
CLASS
EXPRESSION ('[construct]' ~* /Br.*$/)
STYLE
ANGLE 360
END
END
"""
exp = "CLASS EXPRESSION ( '[construct]' ~* /Br.*$/ ) STYLE ANGLE 360 END END"
assert(output(s) == exp)
def test_list_expression():
s = """
CLASS
EXPRESSION /NS_Bahn|NS_BahnAuto/
END
"""
exp = "CLASS EXPRESSION /NS_Bahn|NS_BahnAuto/ END"
assert(output(s) == exp)
def test_numerical_operator_ge_expression():
s = """
CLASS
EXPRESSION ([power] ge 10000)
END
"""
exp = "CLASS EXPRESSION ( [power] ge 10000 ) END"
assert(output(s) == exp)
def test_numerical_operator_gt_expression():
s = """
CLASS
EXPRESSION ([power] gt 10000)
END
"""
exp = "CLASS EXPRESSION ( [power] gt 10000 ) END"
assert(output(s) == exp)
def test_numerical_operator_le_expression():
s = """
CLASS
EXPRESSION ([power] le 100)
END
"""
exp = "CLASS EXPRESSION ( [power] le 100 ) END"
assert(output(s) == exp)
def test_numerical_operator_lt_expression():
s = """
CLASS
EXPRESSION ([power] lt 100)
END
"""
exp = "CLASS EXPRESSION ( [power] lt 100 ) END"
assert(output(s) == exp)
def test_divide():
s = """
CLASS
EXPRESSION ([field1] / [field2])
END
"""
exp = "CLASS EXPRESSION ([field1] / [field2]) END"
assert(output(s) == exp)
def test_multiply():
s = """
CLASS
EXPRESSION ([field1] * [field2])
END
"""
exp = "CLASS EXPRESSION ([field1] * [field2]) END"
assert(output(s) == exp)
def test_negation():
    """Unary minus applied to an attribute is preserved verbatim."""
    snippet = """
    CLASS
        EXPRESSION (-[field1])
    END
    """
    expected = "CLASS EXPRESSION (-[field1]) END"
    assert output(snippet) == expected
def test_pointless_plus():
# Based on test_negation
s = """
CLASS
EXPRESSION (+[field1])
END
"""
exp = "CLASS EXPRESSION ([field1]) END"
assert(output(s) == exp)
def test_power():
s = """
CLASS
EXPRESSION ([field1] ^ [field2])
END
"""
exp = "CLASS EXPRESSION ([field1] ^ [field2]) END"
assert(output(s) == exp)
def test_divide_expression():
s = """
CLASS
EXPRESSION ([field1] / [field2] > 0.1)
END
"""
exp = "CLASS EXPRESSION ( [field1] / [field2] > 0.1 ) END"
assert(output(s) == exp)
def test_modulo_expression():
s = """
CLASS
EXPRESSION ( ([height] % 50) = 0 )
END
"""
exp = "CLASS EXPRESSION ( ( [height] % 50 ) = 0 ) END"
assert(output(s) == exp)
def test_escaped_string():
s = r"""
CLASS
EXPRESSION "National \"hero\" statue"
END
"""
exp = """CLASS EXPRESSION 'National \\"hero\\" statue' END"""
assert(output(s) == exp)
def test_list_expression_alt():
s = """
CLASS
EXPRESSION {2_Klass,Rte2etr}
END
"""
exp = "CLASS EXPRESSION {2_Klass,Rte2etr} END"
assert(output(s) == exp)
s = """
CLASS
EXPRESSION {2_Klass,class with space}
END
"""
exp = "CLASS EXPRESSION {2_Klass,class with space} END"
assert(output(s) == exp)
def test_class_expression_oddname():
s = '''
CLASS
TEXT ([area:ian])
END
'''
exp = "CLASS TEXT ([area:ian]) END"
assert(output(s) == exp)
def test_class_not_expression_brackets():
s = '''
CLASS
EXPRESSION (("[TIME]" eq 'NOW') AND NOT ("[TYPE]" ~ "(something|completely|different)"))
END
'''
exp = '''CLASS EXPRESSION ( ( "[TIME]" eq 'NOW' ) AND NOT ( "[TYPE]" ~ "(something|completely|different)" ) ) END'''
print(output(s))
assert(output(s) == exp)
def test_class_not_expression_no_brackets():
s = '''
CLASS
EXPRESSION ("[TIME]" eq 'NOW' AND NOT "[TYPE]" ~ "(something|completely|different)")
END
'''
exp = '''CLASS EXPRESSION ( ( "[TIME]" eq 'NOW' ) AND NOT ( "[TYPE]" ~ "(something|completely|different)" ) ) END'''
assert(output(s) == exp)
def test_unquoted_unicode_string():
s = '''
CLASS
EXPRESSION {Aérodrome,Aéroport,Héliport,Base spatiale}
END
'''
exp = u'''CLASS EXPRESSION {Aérodrome,Aéroport,Héliport,Base spatiale} END'''
assert(output(s) == exp)
def test_list_with_apostrophe():
s = '''
CLASS
EXPRESSION {bla,d'apostrophe}
END
'''
exp = u'''CLASS EXPRESSION {bla,d'apostrophe} END'''
assert(output(s) == exp)
def run_tests():
pytest.main(["tests/test_expressions.py"])
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
test_list_with_apostrophe()
# run_tests()
print("Done!")
| true | true |
f72fd038fc632f6e1fa32dff24a488528fb8fed5 | 230 | py | Python | xga/relations/clusters/Mλ.py | DavidT3/XGA | cde51c3f29f98b5f1e981fb6d327c04072b0ba38 | [
"BSD-3-Clause"
] | 12 | 2020-05-16T09:45:45.000Z | 2022-02-14T14:41:46.000Z | xga/relations/clusters/Mλ.py | DavidT3/XGA | cde51c3f29f98b5f1e981fb6d327c04072b0ba38 | [
"BSD-3-Clause"
] | 684 | 2020-05-28T08:52:09.000Z | 2022-03-31T10:56:24.000Z | xga/relations/clusters/Mλ.py | DavidT3/XGA | cde51c3f29f98b5f1e981fb6d327c04072b0ba38 | [
"BSD-3-Clause"
] | 2 | 2022-02-04T10:55:55.000Z | 2022-02-04T11:30:56.000Z | # This code is a part of XMM: Generate and Analyse (XGA), a module designed for the XMM Cluster Survey (XCS).
# Last modified by David J Turner (david.turner@sussex.ac.uk) 11/12/2020, 16:41. Copyright (c) David J Turner
| 20.909091 | 110 | 0.704348 | true | true | |
f72fd0a975c56ec2d4a2ead5794352e000898434 | 699 | py | Python | test_app.py | john-lock/chatter | 46c0c61f7e5798478a3630aadbfc47d281189edd | [
"MIT"
] | null | null | null | test_app.py | john-lock/chatter | 46c0c61f7e5798478a3630aadbfc47d281189edd | [
"MIT"
] | 2 | 2019-09-17T18:47:31.000Z | 2019-09-17T18:47:34.000Z | test_app.py | john-lock/chatter | 46c0c61f7e5798478a3630aadbfc47d281189edd | [
"MIT"
] | null | null | null | import pytest
import app
@pytest.fixture
def client():
    """Yield a Flask test client for the app with TESTING mode enabled."""
    app.app.config['TESTING'] = True
    client = app.app.test_client()
    yield client
def test_client_page(client):
    """The main page serves both the instructions and the chat widget markup."""
    rv = client.get('/')
    # Main page (instructions)
    assert b'<p class="lead">A Pusher-powered chat application built using Flask</p>' in rv.data
    # Chat window
    assert b'<input type="email" class="form-control" id="email" placeholder="Email Address*" required>' in rv.data
def test_adminpage(client):
    """The admin page renders its chat-selection prompt."""
    rv = client.get('/admin')
    # Admin page (0 connected clients)
    assert b'Select a chat window to show and sent messages to' in rv.data
# Selenium script with clients interacting with the admin
| 25.888889 | 115 | 0.69671 | import pytest
import app
@pytest.fixture
def client():
app.app.config['TESTING'] = True
client = app.app.test_client()
yield client
def test_client_page(client):
rv = client.get('/')
assert b'<p class="lead">A Pusher-powered chat application built using Flask</p>' in rv.data
assert b'<input type="email" class="form-control" id="email" placeholder="Email Address*" required>' in rv.data
def test_adminpage(client):
rv = client.get('/admin')
assert b'Select a chat window to show and sent messages to' in rv.data
| true | true |
f72fd0f50afdb4c7cb225054bd39d9412b196c9c | 1,314 | py | Python | aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py | msembinelli/aries-cloudagent-python | a5a29dab30238f52dcfb6645aab115d01720a5c7 | [
"Apache-2.0"
] | 1 | 2020-11-30T05:47:54.000Z | 2020-11-30T05:47:54.000Z | aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py | msembinelli/aries-cloudagent-python | a5a29dab30238f52dcfb6645aab115d01720a5c7 | [
"Apache-2.0"
] | 1 | 2020-06-16T20:20:55.000Z | 2020-06-16T20:20:55.000Z | aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py | msembinelli/aries-cloudagent-python | a5a29dab30238f52dcfb6645aab115d01720a5c7 | [
"Apache-2.0"
] | 2 | 2020-02-18T20:34:01.000Z | 2021-03-12T16:18:30.000Z | import pytest
from aries_cloudagent.core.protocol_registry import ProtocolRegistry
from aries_cloudagent.messaging.base_handler import HandlerException
from aries_cloudagent.messaging.request_context import RequestContext
from aries_cloudagent.messaging.responder import MockResponder
from ...handlers.query_handler import QueryHandler
from ...messages.disclose import Disclose
from ...messages.query import Query
TEST_MESSAGE_FAMILY = "TEST_FAMILY"
TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/MESSAGE"
@pytest.fixture()
def request_context() -> RequestContext:
ctx = RequestContext()
registry = ProtocolRegistry()
registry.register_message_types({TEST_MESSAGE_TYPE: object()})
ctx.injector.bind_instance(ProtocolRegistry, registry)
yield ctx
class TestQueryHandler:
@pytest.mark.asyncio
async def test_query_all(self, request_context):
request_context.message = Query(query="*")
handler = QueryHandler()
responder = MockResponder()
await handler.handle(request_context, responder)
messages = responder.messages
assert len(messages) == 1
result, target = messages[0]
assert isinstance(result, Disclose) and result.protocols
assert result.protocols[0]["pid"] == TEST_MESSAGE_FAMILY
assert not target
| 34.578947 | 69 | 0.758752 | import pytest
from aries_cloudagent.core.protocol_registry import ProtocolRegistry
from aries_cloudagent.messaging.base_handler import HandlerException
from aries_cloudagent.messaging.request_context import RequestContext
from aries_cloudagent.messaging.responder import MockResponder
from ...handlers.query_handler import QueryHandler
from ...messages.disclose import Disclose
from ...messages.query import Query
TEST_MESSAGE_FAMILY = "TEST_FAMILY"
TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/MESSAGE"
@pytest.fixture()
def request_context() -> RequestContext:
ctx = RequestContext()
registry = ProtocolRegistry()
registry.register_message_types({TEST_MESSAGE_TYPE: object()})
ctx.injector.bind_instance(ProtocolRegistry, registry)
yield ctx
class TestQueryHandler:
@pytest.mark.asyncio
async def test_query_all(self, request_context):
request_context.message = Query(query="*")
handler = QueryHandler()
responder = MockResponder()
await handler.handle(request_context, responder)
messages = responder.messages
assert len(messages) == 1
result, target = messages[0]
assert isinstance(result, Disclose) and result.protocols
assert result.protocols[0]["pid"] == TEST_MESSAGE_FAMILY
assert not target
| true | true |
f72fd1f63d52cbb7ac69ac0d3b60be8df77af67c | 4,536 | py | Python | benchmark/startQiskit_Class3343.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_Class3343.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_Class3343.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=4
# total number=49
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
    """XOR two bit strings position-wise and return the result reversed.

    *t* must be at least as long as *s*; extra characters of *t* are
    ignored, and a shorter *t* raises IndexError (as in the original).
    """
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(reversed(bits))
def bitwise_dot(s: str, t: str) -> str:
    """Return the parity ("0" or "1") of the dot product of two bit strings.

    Iterates over the length of *s*; a shorter *t* raises IndexError,
    matching the original implementation.
    """
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build an oracle circuit O_f over ``n`` control qubits plus 1 target.

    For every n-bit string ``rep`` with ``f(rep) == "1"`` a multi-controlled
    Toffoli flips the target; X gates temporarily map the 0-bits of ``rep``
    onto the control pattern and are undone immediately afterwards.
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            # sandwich the MCT between X gates so it fires on this pattern
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the generated n-qubit circuit around the oracle for ``f``.

    The gate sequence (and its ``# number=`` tags) is auto-generated; the
    exact order is significant, so do not reorder lines.  The oracle built
    from ``f`` acts on the first n-1 qubits plus the last qubit.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.cx(input_qubit[0],input_qubit[3]) # number=13
    prog.cx(input_qubit[0],input_qubit[3]) # number=17
    prog.x(input_qubit[3]) # number=18
    prog.cx(input_qubit[0],input_qubit[3]) # number=19
    prog.cx(input_qubit[0],input_qubit[3]) # number=15
    # Hadamards to prepare the superposition before the oracle
    prog.h(input_qubit[1]) # number=2
    prog.h(input_qubit[2]) # number=3
    prog.h(input_qubit[3]) # number=4
    prog.y(input_qubit[3]) # number=12
    prog.h(input_qubit[0]) # number=5
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1]) # number=6
    prog.h(input_qubit[2]) # number=7
    prog.h(input_qubit[3]) # number=37
    prog.cz(input_qubit[0],input_qubit[3]) # number=38
    prog.h(input_qubit[3]) # number=39
    prog.cx(input_qubit[0],input_qubit[3]) # number=40
    prog.x(input_qubit[3]) # number=41
    prog.h(input_qubit[3]) # number=43
    prog.cz(input_qubit[0],input_qubit[3]) # number=44
    prog.h(input_qubit[3]) # number=45
    prog.h(input_qubit[3]) # number=30
    prog.cz(input_qubit[0],input_qubit[3]) # number=31
    prog.h(input_qubit[3]) # number=32
    prog.h(input_qubit[0]) # number=33
    prog.cz(input_qubit[3],input_qubit[0]) # number=34
    prog.rx(0.33300882128051834,input_qubit[2]) # number=36
    prog.h(input_qubit[0]) # number=35
    prog.cx(input_qubit[3],input_qubit[0]) # number=23
    prog.cx(input_qubit[3],input_qubit[0]) # number=46
    prog.z(input_qubit[3]) # number=47
    prog.cx(input_qubit[3],input_qubit[0]) # number=48
    prog.cx(input_qubit[3],input_qubit[0]) # number=25
    prog.cx(input_qubit[3],input_qubit[0]) # number=22
    prog.h(input_qubit[3]) # number=8
    prog.h(input_qubit[0]) # number=9
    prog.y(input_qubit[2]) # number=10
    prog.y(input_qubit[2]) # number=11
    # circuit end
    return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('statevector_simulator')
sample_shot =8000
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_Class3343.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 35.4375 | 140 | 0.650573 |
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.cx(input_qubit[0],input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.x(input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.h(input_qubit[1])
prog.h(input_qubit[2])
prog.h(input_qubit[3])
prog.y(input_qubit[3])
prog.h(input_qubit[0])
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1])
prog.h(input_qubit[2])
prog.h(input_qubit[3])
prog.cz(input_qubit[0],input_qubit[3])
prog.h(input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.x(input_qubit[3])
prog.h(input_qubit[3])
prog.cz(input_qubit[0],input_qubit[3])
prog.h(input_qubit[3])
prog.h(input_qubit[3])
prog.cz(input_qubit[0],input_qubit[3])
prog.h(input_qubit[3])
prog.h(input_qubit[0])
prog.cz(input_qubit[3],input_qubit[0])
prog.rx(0.33300882128051834,input_qubit[2])
prog.h(input_qubit[0])
prog.cx(input_qubit[3],input_qubit[0])
prog.cx(input_qubit[3],input_qubit[0])
prog.z(input_qubit[3])
prog.cx(input_qubit[3],input_qubit[0])
prog.cx(input_qubit[3],input_qubit[0])
prog.cx(input_qubit[3],input_qubit[0])
prog.h(input_qubit[3])
prog.h(input_qubit[0])
prog.y(input_qubit[2])
prog.y(input_qubit[2])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('statevector_simulator')
sample_shot =8000
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_Class3343.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| true | true |
f72fd32beb09f4059eb8836278eae50e6d7228a6 | 2,650 | py | Python | purly/py/setup.py | rmorshea/purly | 0d07d6d7636fd81d9c1c14e2df6a32fc28b325f7 | [
"MIT"
] | 2 | 2018-08-18T05:39:24.000Z | 2018-08-21T19:02:16.000Z | purly/py/setup.py | rmorshea/purly | 0d07d6d7636fd81d9c1c14e2df6a32fc28b325f7 | [
"MIT"
] | 2 | 2018-07-27T07:14:19.000Z | 2018-07-27T07:17:06.000Z | purly/py/setup.py | rmorshea/purly | 0d07d6d7636fd81d9c1c14e2df6a32fc28b325f7 | [
"MIT"
] | null | null | null | from __future__ import print_function
import os
import sys
import shutil
from glob import glob
from setuptools import find_packages
from distutils.core import setup
# the name of the project
name = "purly"
# basic paths used to gather files
here = os.path.abspath(os.path.dirname(__file__))
root = os.path.join(here, name)
#-----------------------------------------------------------------------------
# Python Version Check
#-----------------------------------------------------------------------------
if sys.version_info < (3,6) or sys.version_info >= (3, 7):
error = "ERROR: %s requires Python version 3.6." % name
print(error, file=sys.stderr)
sys.exit(1)
#-----------------------------------------------------------------------------
# requirements
#-----------------------------------------------------------------------------
requirements = [
'sanic',
'sanic_cors',
'asyncio',
'websocket-client',
'websockets==5.0',
'spectate>=0.2.1',
]
#-----------------------------------------------------------------------------
# Library Version
#-----------------------------------------------------------------------------
# Extract __version__ from the package's __init__.py without importing it.
import ast

with open(os.path.join(root, '__init__.py')) as f:
    for line in f.read().split("\n"):
        if line.startswith("__version__ = "):
            # ast.literal_eval only accepts Python literals; unlike the
            # original eval(), it cannot execute arbitrary code found in
            # the file being scanned.
            version = ast.literal_eval(line.split("=", 1)[1].strip())
            break
    else:
        # for/else: the loop finished without finding a version line
        print("No version found in purly/__init__.py")
        sys.exit(1)
#-----------------------------------------------------------------------------
# Library Description
#-----------------------------------------------------------------------------
with open(os.path.join(here, 'README.md')) as f:
long_description = f.read()
#-----------------------------------------------------------------------------
# Install It
#-----------------------------------------------------------------------------
if __name__ == '__main__':
setup(
name=name,
version=version,
packages=find_packages(),
include_package_data=True,
description="Control the web with Python",
long_description=long_description,
long_description_content_type='text/markdown',
author="Ryan Morshead",
author_email="ryan.morshead@gmail.com",
url="https://github.com/rmorshea/purly",
license='MIT',
platforms="Linux, Mac OS X, Windows",
keywords=["interactive", "widgets", "DOM", "synchronization", "React"],
install_requires=requirements,
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.6',
],
)
| 31.547619 | 79 | 0.442642 | from __future__ import print_function
import os
import sys
import shutil
from glob import glob
from setuptools import find_packages
from distutils.core import setup
name = "purly"
here = os.path.abspath(os.path.dirname(__file__))
root = os.path.join(here, name)
if sys.version_info < (3,6) or sys.version_info >= (3, 7):
error = "ERROR: %s requires Python version 3.6." % name
print(error, file=sys.stderr)
sys.exit(1)
requirements = [
'sanic',
'sanic_cors',
'asyncio',
'websocket-client',
'websockets==5.0',
'spectate>=0.2.1',
]
with open(os.path.join(root, '__init__.py')) as f:
for line in f.read().split("\n"):
if line.startswith("__version__ = "):
version = eval(line.split("=", 1)[1])
break
else:
print("No version found in purly/__init__.py")
sys.exit(1)
with open(os.path.join(here, 'README.md')) as f:
long_description = f.read()
if __name__ == '__main__':
setup(
name=name,
version=version,
packages=find_packages(),
include_package_data=True,
description="Control the web with Python",
long_description=long_description,
long_description_content_type='text/markdown',
author="Ryan Morshead",
author_email="ryan.morshead@gmail.com",
url="https://github.com/rmorshea/purly",
license='MIT',
platforms="Linux, Mac OS X, Windows",
keywords=["interactive", "widgets", "DOM", "synchronization", "React"],
install_requires=requirements,
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.6',
],
)
| true | true |
f72fd342ba9c1c26e0b221251203aed9effad1f6 | 18,549 | py | Python | plugin.video.SportsDevil/lib/utils/drench.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | 2 | 2018-11-02T19:55:30.000Z | 2020-08-14T02:22:20.000Z | plugin.video.SportsDevil/lib/utils/drench.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | null | null | null | plugin.video.SportsDevil/lib/utils/drench.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | 3 | 2019-12-17T20:47:00.000Z | 2021-02-11T19:03:59.000Z | """
JavaScript encryption module ver. 2.0 by Daniel Rench
Based on existing code:
Copyright (c) 2003 by Andre Mueller.
Init of blowfish constants with a function (init/backup errors)
Copyright (c) 2003 by Rainer Wollmann
This Object is open source. You can redistribute it and/or modify
it under the terms of the Universal General Public License (UGPL).
http://www.ugpl.de/
"""
import math as Math
class blowfish:
def __init__(self,k):
if len(k) is 0:
raise '0 length key'
self.bf_P = self.Fbf_P()
self.bf_S0 = self.Fbf_S0()
self.bf_S1 = self.Fbf_S1()
self.bf_S2 = self.Fbf_S2()
self.bf_S3 = self.Fbf_S3()
self.key = k
j = 0
i = 0
while i < 18:
d = ((ord(self.key[j % len(self.key)]) * 256 + ord(self.key[(j + 1) % len(self.key)])) * 256 + ord(self.key[(j + 2) % len(self.key)])) * 256 + ord(self.key[(j + 3) % len(self.key)])
self.bf_P[i] = self.xor(self.bf_P[i], d)
j = (j + 4) % len(self.key)
i+=1
self.key = self.escape(self.key)
self.xl_par = 0x00000000
self.xr_par = 0x00000000
i = 0
while i < 18:
self.encipher()
self.bf_P[i] = self.xl_par
self.bf_P[i + 1] = self.xr_par
i += 2
j = 0
while j < 256:
self.encipher()
self.bf_S0[j] = self.xl_par
self.bf_S0[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S1[j] = self.xl_par
self.bf_S1[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S2[j] = self.xl_par
self.bf_S2[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S3[j] = self.xl_par
self.bf_S3[j + 1] = self.xr_par
j += 2
def unescape(self,t):
r = ''
i = 0
l = len(t)
while i < l:
t1 = ord(t[i])
i+=1
t2 = ord(t[i])
if t1 < 58:
t1 -= 48
else:
if t1 > 96:
t1 -= 87
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
if t2 > 96:
t2 -= 87
else:
t2 -= 55
r += chr(t1 * 16 + t2)
i+=1
return r
def escape(self,t):
r = ''
i = 0
l = len(t)
while i < l:
c = ord(t[i])
t1 = int(Math.floor(c / 16))
t2 = c % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i+=1
return r
    def wordbyte0(self,w):
        """Return the most-significant byte of the 32-bit word *w*."""
        return int(Math.floor(Math.floor(Math.floor(w / 256) / 256) / 256) % 256)
    def wordbyte1(self,w):
        """Return the second-most-significant byte of the 32-bit word *w*."""
        return int(Math.floor(Math.floor(w / 256) / 256) % 256)
    def wordbyte2(self,w):
        """Return the third byte of the 32-bit word *w*."""
        return int(Math.floor(w / 256) % 256)
    def wordbyte3(self,w):
        """Return the least-significant byte of the 32-bit word *w*."""
        return w % 256
    def xor(self,w1, w2):
        """XOR two words, wrapping a negative result into unsigned 32-bit
        range (mirrors JavaScript's signed 32-bit bitwise semantics)."""
        r = w1 ^ w2
        if r < 0:
            r = 0xffffffff + 1 + r
        return r
    def Fbf_P(self):
        """Return a fresh copy of the initial 18-entry Blowfish P-array."""
        return [0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b]
def Fbf_S0(self):
return [0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 
0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]
def Fbf_S1(self):
return [0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 
0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]
def Fbf_S2(self):
return [0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 
0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]
def Fbf_S3(self):
return [0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 
0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]
def encrypt(self,t):
t = self.escape(t)
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.encipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return r
def decrypt(self,t):
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.decipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return self.unescape(r).replace('\x00', '')
def wordescape(self,w):
r = ''
m = [self.wordbyte0(w), self.wordbyte1(w), self.wordbyte2(w), self.wordbyte3(w)]
i = 3
while i is not -1:
t1 = int(Math.floor(m[i] / 16))
t2 = m[i] % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i-=1
return r
def wordunescape(self,t):
r = 0
i = 6
while i is not -2:
t1 = ord(t[i])
t2 = ord(t[i+1])
if t1 < 58:
t1 -= 48
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
t2 -= 55
r = r * 256 + t1 * 16 + t2
i -= 2
return r
def round(self, a, b, n):
t = self
return t.xor(a, t.xor(t.xor(t.bf_S0[t.wordbyte0(b)] + t.bf_S1[t.wordbyte1(b)], t.bf_S2[t.wordbyte2(b)]) + t.bf_S3[t.wordbyte3(b)], t.bf_P[n]))
def encipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[0])
Xr = t.round(Xr, Xl, 1)
Xl = t.round(Xl, Xr, 2)
Xr = t.round(Xr, Xl, 3)
Xl = t.round(Xl, Xr, 4)
Xr = t.round(Xr, Xl, 5)
Xl = t.round(Xl, Xr, 6)
Xr = t.round(Xr, Xl, 7)
Xl = t.round(Xl, Xr, 8)
Xr = t.round(Xr, Xl, 9)
Xl = t.round(Xl, Xr, 10)
Xr = t.round(Xr, Xl, 11)
Xl = t.round(Xl, Xr, 12)
Xr = t.round(Xr, Xl, 13)
Xl = t.round(Xl, Xr, 14)
Xr = t.round(Xr, Xl, 15)
Xl = t.round(Xl, Xr, 16)
Xr = t.xor(Xr, t.bf_P[17])
t.xl_par = Xr
t.xr_par = Xl
def decipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[17])
Xr = t.round(Xr, Xl, 16)
Xl = t.round(Xl, Xr, 15)
Xr = t.round(Xr, Xl, 14)
Xl = t.round(Xl, Xr, 13)
Xr = t.round(Xr, Xl, 12)
Xl = t.round(Xl, Xr, 11)
Xr = t.round(Xr, Xl, 10)
Xl = t.round(Xl, Xr, 9)
Xr = t.round(Xr, Xl, 8)
Xl = t.round(Xl, Xr, 7)
Xr = t.round(Xr, Xl, 6)
Xl = t.round(Xl, Xr, 5)
Xr = t.round(Xr, Xl, 4)
Xl = t.round(Xl, Xr, 3)
Xr = t.round(Xr, Xl, 2)
Xl = t.round(Xl, Xr, 1)
Xr = t.xor(Xr, t.bf_P[0])
t.xl_par = Xr
t.xr_par = Xl
| 60.224026 | 3,083 | 0.721441 |
import math as Math
class blowfish:
def __init__(self,k):
if len(k) is 0:
raise '0 length key'
self.bf_P = self.Fbf_P()
self.bf_S0 = self.Fbf_S0()
self.bf_S1 = self.Fbf_S1()
self.bf_S2 = self.Fbf_S2()
self.bf_S3 = self.Fbf_S3()
self.key = k
j = 0
i = 0
while i < 18:
d = ((ord(self.key[j % len(self.key)]) * 256 + ord(self.key[(j + 1) % len(self.key)])) * 256 + ord(self.key[(j + 2) % len(self.key)])) * 256 + ord(self.key[(j + 3) % len(self.key)])
self.bf_P[i] = self.xor(self.bf_P[i], d)
j = (j + 4) % len(self.key)
i+=1
self.key = self.escape(self.key)
self.xl_par = 0x00000000
self.xr_par = 0x00000000
i = 0
while i < 18:
self.encipher()
self.bf_P[i] = self.xl_par
self.bf_P[i + 1] = self.xr_par
i += 2
j = 0
while j < 256:
self.encipher()
self.bf_S0[j] = self.xl_par
self.bf_S0[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S1[j] = self.xl_par
self.bf_S1[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S2[j] = self.xl_par
self.bf_S2[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S3[j] = self.xl_par
self.bf_S3[j + 1] = self.xr_par
j += 2
def unescape(self, t):
    """Decode the hex string *t* (upper- or lower-case digits) back into
    the raw character string produced by escape()."""
    def nibble(ch):
        # Same arithmetic as the original decoder: '0'-'9', then
        # lower-case 'a'-'f', otherwise upper-case 'A'-'F'.
        code = ord(ch)
        if code < 58:
            return code - 48
        if code > 96:
            return code - 87
        return code - 55
    decoded = []
    for pos in range(0, len(t), 2):
        decoded.append(chr(nibble(t[pos]) * 16 + nibble(t[pos + 1])))
    return ''.join(decoded)
def escape(self, t):
    """Encode each character of *t* as two upper-case hex digits."""
    pieces = []
    for ch in t:
        hi, lo = divmod(ord(ch), 16)
        # Map nibble 0-9 onto '0'-'9' (+48) and 10-15 onto 'A'-'F' (+55),
        # exactly as the original per-character arithmetic did.
        hi += 48 if hi < 10 else 55
        lo += 48 if lo < 10 else 55
        pieces.append(chr(hi) + chr(lo))
    return ''.join(pieces)
def wordbyte0(self, w):
    """Most significant byte of the 32-bit word *w*.

    Bug fix: the original computed this via Math.floor on float
    true-division, which silently loses precision for integers >= 2**53;
    integer shifts are exact (and identical for the 32-bit values used
    throughout this class).
    """
    return (w >> 24) % 256

def wordbyte1(self, w):
    """Second-most significant byte of *w*."""
    return (w >> 16) % 256

def wordbyte2(self, w):
    """Third byte of *w*."""
    return (w >> 8) % 256

def wordbyte3(self, w):
    """Least significant byte of *w*."""
    return w % 256
def xor(self, w1, w2):
    """Bitwise XOR of *w1* and *w2*, folded back into the unsigned
    32-bit range whenever the Python result comes out negative."""
    result = w1 ^ w2
    return result if result >= 0 else result + 0x100000000
def Fbf_P(self):
# Initial Blowfish P-array (18 round subkeys): the standard published
# constants, the first 32-bit words of the hexadecimal expansion of pi.
return [0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b]
def Fbf_S0(self):
return [0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 
0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]
def Fbf_S1(self):
return [0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 
0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]
def Fbf_S2(self):
return [0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 
0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]
def Fbf_S3(self):
return [0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 
0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]
def encrypt(self,t):
t = self.escape(t)
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.encipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return r
def decrypt(self,t):
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.decipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return self.unescape(r).replace('\x00', '')
def wordescape(self,w):
r = ''
m = [self.wordbyte0(w), self.wordbyte1(w), self.wordbyte2(w), self.wordbyte3(w)]
i = 3
while i is not -1:
t1 = int(Math.floor(m[i] / 16))
t2 = m[i] % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i-=1
return r
def wordunescape(self,t):
r = 0
i = 6
while i is not -2:
t1 = ord(t[i])
t2 = ord(t[i+1])
if t1 < 58:
t1 -= 48
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
t2 -= 55
r = r * 256 + t1 * 16 + t2
i -= 2
return r
def round(self, a, b, n):
t = self
return t.xor(a, t.xor(t.xor(t.bf_S0[t.wordbyte0(b)] + t.bf_S1[t.wordbyte1(b)], t.bf_S2[t.wordbyte2(b)]) + t.bf_S3[t.wordbyte3(b)], t.bf_P[n]))
def encipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[0])
Xr = t.round(Xr, Xl, 1)
Xl = t.round(Xl, Xr, 2)
Xr = t.round(Xr, Xl, 3)
Xl = t.round(Xl, Xr, 4)
Xr = t.round(Xr, Xl, 5)
Xl = t.round(Xl, Xr, 6)
Xr = t.round(Xr, Xl, 7)
Xl = t.round(Xl, Xr, 8)
Xr = t.round(Xr, Xl, 9)
Xl = t.round(Xl, Xr, 10)
Xr = t.round(Xr, Xl, 11)
Xl = t.round(Xl, Xr, 12)
Xr = t.round(Xr, Xl, 13)
Xl = t.round(Xl, Xr, 14)
Xr = t.round(Xr, Xl, 15)
Xl = t.round(Xl, Xr, 16)
Xr = t.xor(Xr, t.bf_P[17])
t.xl_par = Xr
t.xr_par = Xl
def decipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[17])
Xr = t.round(Xr, Xl, 16)
Xl = t.round(Xl, Xr, 15)
Xr = t.round(Xr, Xl, 14)
Xl = t.round(Xl, Xr, 13)
Xr = t.round(Xr, Xl, 12)
Xl = t.round(Xl, Xr, 11)
Xr = t.round(Xr, Xl, 10)
Xl = t.round(Xl, Xr, 9)
Xr = t.round(Xr, Xl, 8)
Xl = t.round(Xl, Xr, 7)
Xr = t.round(Xr, Xl, 6)
Xl = t.round(Xl, Xr, 5)
Xr = t.round(Xr, Xl, 4)
Xl = t.round(Xl, Xr, 3)
Xr = t.round(Xr, Xl, 2)
Xl = t.round(Xl, Xr, 1)
Xr = t.xor(Xr, t.bf_P[0])
t.xl_par = Xr
t.xr_par = Xl
| true | true |
f72fd3f3fdf870d59380133842ea37b254c8a18a | 24,808 | py | Python | Scripts/simulation/restaurants/restaurant_commands.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/restaurants/restaurant_commands.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/restaurants/restaurant_commands.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\restaurants\restaurant_commands.py
# Compiled at: 2018-08-28 03:56:41
# Size of source mod 2**32: 29007 bytes
from protocolbuffers import Restaurant_pb2
from event_testing import test_events
from google.protobuf import text_format
from restaurants import restaurant_utils
from restaurants.chefs_choice import ChefsChoice
from restaurants.restaurant_diner_situation import DinerSubSituationState, RestaurantDinerSubSituation, RestaurantDinerBackGroundSituation
from restaurants.restaurant_order import OrderStatus, OrderRecommendationState, GroupOrder
from restaurants.restaurant_tuning import RestaurantTuning, RestaurantIngredientQualityType, get_restaurant_zone_director
from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam, get_optional_target
from sims import sim
from sims4.protocol_buffer_utils import has_field
import services, sims4.commands
@sims4.commands.Command('restaurant.order_food', command_type=(sims4.commands.CommandType.Live))
def order_food(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
    """Live command: place a single food order for a Sim at a restaurant.

    recipe_type -- the recipe to order (required).
    opt_sim     -- optional target Sim; resolved via get_optional_target.
    Returns True on success, False on any failure (each failure also
    emits a RestaurantOrderFood automation status line).
    """
    if recipe_type is None:
        sims4.commands.output('Recipe is None', _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    zone_director.make_one_order(sim, recipe_type)
    groups = zone_director.get_dining_groups_by_sim(sim)
    if groups is None:
        sims4.commands.output('Sim {} is not in dining group'.format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        # Bug fix: the original fell through here and crashed on
        # groups.pop() below; bail out like every other failure path.
        return False
    group = groups.pop()
    group.hold_ordered_cost(recipe_type.restaurant_base_price)
    sims4.commands.automation_output('RestaurantOrderFood; Status:Success', _connection)
    return True
@sims4.commands.Command('restaurant.show_menu', command_type=(sims4.commands.CommandType.Live))
def show_menu(opt_sim: OptionalTargetParam=None, _connection=None):
    """Live command: open the restaurant menu UI for a Sim.

    Returns True on success, False when the Sim or restaurant zone
    director cannot be resolved.  (Consistency fix: the failure paths
    returned False but success previously returned None implicitly.)
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    zone_director.show_menu(sim)
    return True
@sims4.commands.Command('restaurant.show_menu_for_chef', command_type=(sims4.commands.CommandType.Live))
def show_menu_for_chef(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
    """Live command: open a chef's menu UI for a Sim.

    opt_sim  -- the Sim to show the menu to.
    chef_sim -- the chef whose situation supplies the menu.
    Returns True on success, False on any lookup failure.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    chef_sim = get_optional_target(chef_sim, _connection)
    if chef_sim is None:
        sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
    if chef_situation is None:
        # Bug fix: the original omitted _connection here, so the message
        # was never routed to the issuing client (every sibling call
        # passes it).
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    chef_situation.show_menu(sim)
    return True
@sims4.commands.Command('restaurant.show_recommendation_menu_for_sim', command_type=(sims4.commands.CommandType.Live))
def show_recommendation_menu_for_sim(opt_sim: OptionalTargetParam=None, owner_sim: OptionalTargetParam=None, _connection=None):
    """Live command: open the restaurant menu in recommendation mode.

    owner_sim is accepted for interface compatibility but unused here —
    presumably reserved for the recommending Sim; confirm against callers.
    Returns True on success, False on lookup failure.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    zone_director.show_menu(sim, is_recommendation=True)
    # Consistency fix: report success explicitly like the sibling commands.
    return True
@sims4.commands.Command('restaurant.claim_table', command_type=(sims4.commands.CommandType.Live))
def claim_table(opt_sim: OptionalTargetParam=None, opt_table: OptionalTargetParam=None, _connection=None):
    """Live command: have a Sim's dining group claim a table.

    opt_table may resolve to None; the zone director is given whatever
    resolves (matching the original behavior).
    Returns True on success, False on lookup failure.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    table_to_claim = get_optional_target(opt_table, _connection)
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    zone_director.claim_table(sim, table_to_claim)
    # Consistency fix: report success explicitly like the sibling commands.
    return True
@sims4.commands.Command('restaurant.order_for_table', command_type=(sims4.commands.CommandType.Live))
def order_for_table(sim_orders: str, _connection=None):
    """Live command: submit a whole table's orders at once.

    sim_orders -- a text-format Restaurant_pb2.SimOrders proto; each
    entry pairs a sim_id with a recipe_id.  The first Sim's dining group
    holds the meal cost.  Returns True on success, False on failure.
    """
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    proto = Restaurant_pb2.SimOrders()
    text_format.Merge(sim_orders, proto)
    orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
    # Robustness fix: an empty proto previously crashed on orders[0][0].
    if not orders:
        sims4.commands.output('No orders provided', _connection)
        return False
    sim = services.object_manager().get(orders[0][0])
    if sim is None:
        sims4.commands.output("Trying to order for a Sim that isn't on the lot", _connection)
        return False
    zone_director.order_for_table(orders)
    groups = zone_director.get_dining_groups_by_sim(sim)
    # Robustness fix: groups.pop() previously crashed when the Sim had
    # no dining group (None or empty result).
    if not groups:
        sims4.commands.output('Sim {} is not in dining group'.format(sim), _connection)
        return False
    group = groups.pop()
    group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
    return True
@sims4.commands.Command('restaurant.comp_drinks_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_drinks_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
    """Live command: order a complimentary round of drinks for the
    dining group the target Sim belongs to.  Returns True on success."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    groups = zone_director.get_dining_groups_by_sim(sim)
    # Robustness fix: groups.pop() previously crashed when the Sim had
    # no dining group (None or empty result).
    if not groups:
        sims4.commands.output('Sim {} is not in dining group'.format(opt_sim), _connection)
        return False
    group = groups.pop()
    group.order_course_for_group((ChefsChoice.DRINK_COURSE), complimentary=True)
    return True
@sims4.commands.Command('restaurant.comp_desserts_for_group', command_type=sims4.commands.CommandType.Live)
def comp_desserts_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
    """Order a complimentary dessert course for the Sim's dining group."""
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    diner = get_optional_target(opt_sim, _connection)
    if diner is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    dining_group = director.get_dining_groups_by_sim(diner).pop()
    dining_group.order_course_for_group(ChefsChoice.DESSERT_COURSE, complimentary=True)
    return True
@sims4.commands.Command('restaurant.recommend_order_for_table', command_type=(sims4.commands.CommandType.Live))
def recommend_order_for_table(sim_orders: str, _connection=None):
    # Propose (rather than place) an order for every Sim listed in the
    # text-format SimOrders protobuf, then trigger a recommendation interaction
    # from the active Sim toward each targeted diner.
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    proto = Restaurant_pb2.SimOrders()
    text_format.Merge(sim_orders, proto)
    # Each entry becomes a (sim_id, recipe_id) pair.
    orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
    # Resolve every referenced Sim once; the set de-duplicates repeated sim_ids.
    sims_in_order = set([services.object_manager().get(order_sim_id) for order_sim_id in [order[0] for order in orders]])
    for sim in sims_in_order:
        if sim is None:
            sims4.commands.output("Trying to target order for a Sim that isn't on the lot", _connection)
            return False
        active_group_order = _get_active_group_order_for_dining_group(sim)
        if active_group_order:
            # The group already has an open order: append every recommended
            # item to it as RECOMMENDATION_PROPOSAL entries.
            recipe_manager = services.get_instance_manager(sims4.resources.Types.RECIPE)
            for order in orders:
                recipe = recipe_manager.get(order[1])
                recipes = GroupOrder.get_food_drink_recipe_id_tuple(recipe)
                active_group_order.add_sim_order((order[0]), food_recipe_id=(recipes[0]), drink_recipe_id=(recipes[1]),
                  recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
                  order_status=(OrderStatus.ORDER_INIT))
        else:
            # No open order yet: create one for the whole table, but do not
            # send it to the kitchen (send_order=False) — it is only proposed.
            zone_director.order_for_table(orders, send_order=False,
              recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
              order_status=(OrderStatus.ORDER_INIT))
        # NOTE(review): hold_ordered_cost runs once per Sim in the order, using
        # that Sim's dining group — presumably they all share one group; verify.
        groups = zone_director.get_dining_groups_by_sim(sim)
        group = groups.pop()
        group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
    for sim in sims_in_order:
        zone_director.trigger_recommendation_interaction(services.get_active_sim(), sim)
    return True
@sims4.commands.Command('restaurant.npc_accept_or_reject_recommendation', command_type=sims4.commands.CommandType.Live)
def npc_accept_or_reject_recommendation(opt_sim: OptionalTargetParam=None, accept_recommendation: bool=True, _connection=None):
    """Have an NPC Sim accept a proposed order recommendation, or reject it
    and replace it with a chef's-choice order of their own."""
    target_sim = get_optional_target(opt_sim, _connection)
    if target_sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    pending_order = director.get_active_group_order_for_sim(target_sim.sim_id)
    if pending_order is None:
        sims4.commands.output('Sim {} was not offered a recommendation.'.format(opt_sim), _connection)
        return False
    if accept_recommendation:
        sim_order = pending_order.get_sim_order(target_sim.sim_id)
        if sim_order is not None:
            sim_order.recommendation_state = OrderRecommendationState.RECOMMENDATION_ACCEPTED
    else:
        # Rejected: drop the proposed entry and substitute the NPC's own pick.
        pending_order.remove_sim_order(target_sim.sim_id)
        food_recipe, drink_recipe = ChefsChoice.get_order_for_npc_sim(target_sim)
        pending_order.add_sim_order(target_sim.sim_id, food_recipe_id=food_recipe.guid64,
            drink_recipe_id=drink_recipe.guid64,
            recommendation_state=OrderRecommendationState.RECOMMENDATION_REJECTED,
            order_status=OrderStatus.ORDER_INIT)
    return True
@sims4.commands.Command('restaurant.order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def order_food_at_chef_station(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
    """Order a specific recipe for a Sim directly at the chef station and
    fire the RestaurantFoodOrdered test event."""
    if recipe_type is None:
        sims4.commands.output('Recipe is None', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation()
    if chef_situation is None:
        # Bug fix: output() requires the connection argument; it was missing here,
        # so this error path raised instead of printing.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    chef_situation.add_direct_order(recipe_type, sim)
    services.get_event_manager().process_event(test_events.TestEvent.RestaurantFoodOrdered, sim_info=sim.sim_info)
    return True
@sims4.commands.Command('restaurant.npc_order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_at_chef_station(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
    """Have an NPC Sim place a chef's-choice food order with a specific chef.

    Uses the chef's menu preset when one is set; otherwise picks from the
    NPC's own chef's-choice preferences.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    chef_sim = get_optional_target(chef_sim, _connection)
    if chef_sim is None:
        sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
    if chef_situation is None:
        # Bug fix: output() was called without the required connection argument,
        # so this error path raised instead of printing.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    if chef_situation.menu_preset is not None:
        food_order = ChefsChoice.get_order_for_npc_sim_with_menu(sim, chef_situation.menu_preset)
    else:
        food_order, _ = ChefsChoice.get_order_for_npc_sim(sim)
    chef_situation.add_direct_order(food_order, sim)
    services.get_event_manager().process_event(test_events.TestEvent.RestaurantFoodOrdered, sim_info=sim.sim_info)
    return True
@sims4.commands.Command('restaurant.give_chef_feedback', command_type=sims4.commands.CommandType.Live)
def give_chef_feedback(to_chef_sim_id: OptionalTargetParam=None, from_sim_id: OptionalTargetParam=None, is_compliment: bool=True, waitstaff_sim_id: OptionalTargetParam=None, _connection=None):
    """Relay a compliment or complaint from a diner to a chef via a waitstaff Sim."""
    sender = get_optional_target(from_sim_id, _connection)
    if sender is None:
        sims4.commands.output("From Sim {} doesn't exist.".format(from_sim_id), _connection)
        return False
    chef = get_optional_target(to_chef_sim_id, _connection)
    if chef is None:
        sims4.commands.output("To Chef Sim {} doesn't exist.".format(to_chef_sim_id), _connection)
        return False
    waiter = get_optional_target(waitstaff_sim_id, _connection)
    if waiter is None:
        sims4.commands.output("Waitstaff Sim {} doesn't exist.".format(waitstaff_sim_id), _connection)
        return False
    situation = restaurant_utils.get_waitstaff_situation(waiter)
    situation.give_chef_feedback(chef, sender, is_compliment)
@sims4.commands.Command('restaurant.npc_order_food_from_waitstaff', command_type=sims4.commands.CommandType.Live)
def npc_order_food_from_waitstaff(opt_sim: OptionalTargetParam=None, _connection=None):
    """Have every dining group containing the Sim place its order with the waitstaff."""
    target_sim = get_optional_target(opt_sim, _connection)
    if target_sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not currently on a restaurant lot so cannot place orders with the waitstaff for NPC groups.', _connection)
        return False
    # Reuse an already-open group order for the table when one exists.
    open_order = _get_active_group_order_for_dining_group(target_sim)
    for dining_group in director.get_dining_groups_by_sim(target_sim):
        if not dining_group.order_for_table(active_group_order=open_order):
            sims4.commands.output('Failed to place order for dining group.', _connection)
            return False
    return True
@sims4.commands.Command('restaurant.comp_order_for_sim', command_type=(sims4.commands.CommandType.Live))
def comp_order_for_sim(opt_sim: OptionalTargetParam=None, _connection=None):
    """Comp (make complimentary) every delivered order belonging to a Sim."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        # Bug fix: all three error paths called sims4.commands.Command (the
        # decorator factory) instead of sims4.commands.output, so no message
        # was ever printed to the console.
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot.', _connection)
        return False
    business_manager = zone_director.business_manager
    if business_manager is None:
        sims4.commands.output("The current zone doesn't have a business manager.", _connection)
        return False
    for group_order in zone_director.get_delivered_orders_for_sim(sim.id):
        business_manager.comp_order_for_sim(group_order.get_sim_order(sim.id))
@sims4.commands.Command('restaurant.create_food_for_group_order_sim', command_type=sims4.commands.CommandType.Live)
def create_food_for_group_order_sim(opt_sim: OptionalTargetParam=None, _connection=None):
    """Create the food objects for the active group order that includes this Sim."""
    diner = get_optional_target(opt_sim, _connection)
    if diner is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    active_order = director.get_active_group_order_for_sim(diner.id)
    if active_order is None:
        sims4.commands.output('There is no group order in for the passed in sim {}.'.format(diner), _connection)
        return False
    director.create_food_for_group_order(active_order)
    return True
@sims4.commands.Command('restaurant.create_food_for_group_order_table', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_table(table_id: OptionalTargetParam=None, _connection=None):
    """Create the food objects for the active group order at a given table."""
    table = get_optional_target(table_id, _connection)
    if table is None:
        sims4.commands.output("Table {} doesn't exist.".format(table_id), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_table(table.id)
    if group_order is None:
        # Bug fix: the original formatted an undefined name `sim` (copy/paste from
        # the sim variant), raising NameError on this path; report the table.
        sims4.commands.output('There is no group order in for the passed in table {}.'.format(table), _connection)
        return False
    zone_director.create_food_for_group_order(group_order)
    return True
@sims4.commands.Command('restaurant.set_ingredient_quality', command_type=(sims4.commands.CommandType.Live))
def set_ingredient_quality(ingredient_quality: RestaurantIngredientQualityType, _connection=None):
    """Set the ingredient-quality tier for the owned restaurant in this zone."""
    business_manager = services.business_service().get_business_manager_for_zone()
    if business_manager is None:
        # Bug fix: output() was missing the required connection argument, so this
        # error path raised instead of printing.
        sims4.commands.output('Trying to set the ingredient quality for a restaurant but there was no valid business manager found for the current zone.', _connection)
        return False
    business_manager.set_ingredient_quality(ingredient_quality)
@sims4.commands.Command('restaurant.expedite_sims_order', command_type=sims4.commands.CommandType.Live)
def expedite_sim_order(opt_sim: OptionalTargetParam=None, _connection=None):
    """Flag the Sim's group order as expedited so the kitchen prioritizes it."""
    target_sim = get_optional_target(opt_sim, _connection)
    if target_sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return
    if not director.has_group_order(target_sim.id):
        sims4.commands.output('Sim {} does not have an order.'.format(target_sim), _connection)
        return
    order = director.get_group_order(target_sim.id)
    if order is not None:
        order.expedited = True
@sims4.commands.Command('restaurant.refresh_configuration', command_type=sims4.commands.CommandType.Live)
def refresh_configuration(_connection=None):
    """Reload the restaurant configuration, if the current lot is a restaurant."""
    director = get_restaurant_zone_director()
    if director is None:
        return
    director.refresh_configuration()
def _get_active_group_order_for_dining_group(sim):
    """Return the first active group order found for any member of the Sim's
    dining group(s), or None when there is no restaurant director or no order."""
    director = get_restaurant_zone_director()
    if director is None:
        return None
    for dining_group in director.get_dining_groups_by_sim(sim):
        for member in dining_group.all_sims_in_situation_gen():
            order = director.get_active_group_order_for_sim(member.sim_id)
            if order:
                return order
    return None
@sims4.commands.Command('restaurant.sim_is_employee', command_type=sims4.commands.CommandType.Automation)
def sim_is_employee(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation query: report whether a Sim works at the current restaurant.

    Owned restaurants consult the business manager's roster; unowned ones fall
    back to checking whether the Sim is in a chef/host/waitstaff situation.
    """
    target_sim = get_optional_target(opt_sim, _connection)
    if target_sim is None:
        sims4.commands.output("False, Sim {} doesn't exist.".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:InvalidSim', _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NotOnLot', _connection)
        return False
    situation_manager = services.get_zone_situation_manager()
    if situation_manager is None:
        sims4.commands.output('False, There is no situation manager on this lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NoSituationMgr', _connection)
        return False
    business_manager = director.business_manager
    if business_manager is not None:
        if business_manager.is_employee(target_sim.sim_info):
            sims4.commands.output('True, Sim is currently an employee', _connection)
            sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
            return True
    else:
        staff_situation_types = (RestaurantTuning.CHEF_SITUATION,
                                 RestaurantTuning.HOST_SITUATION,
                                 RestaurantTuning.WAITSTAFF_SITUATION)
        for situation in situation_manager.get_situations_sim_is_in(target_sim):
            if type(situation) in staff_situation_types:
                sims4.commands.output('True, Sim is an employee of the current restaurant.', _connection)
                sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
                return True
    sims4.commands.output('False, Sim is not an employee of the current restaurant.', _connection)
    sims4.commands.automation_output('RestaurantIsEmployee; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.is_open', command_type=sims4.commands.CommandType.Automation)
def is_open(_connection=None):
    """Automation query: report whether the restaurant on this lot is open.

    Unowned restaurants are considered always open.
    """
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:NotOnLot', _connection)
        return False
    business_manager = director.business_manager
    if business_manager is None:
        sims4.commands.output('True, unowned restaurants are always open.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    if business_manager.is_open:
        sims4.commands.output('True, this owned restaurant is currently open', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    sims4.commands.output('False, this owned restaurant is currently closed', _connection)
    sims4.commands.automation_output('RestaurantIsOpen; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.get_sim_diner_state', command_type=sims4.commands.CommandType.Automation)
def get_sim_dining_state(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation query: report the dining state of the Sim's dining group."""
    diner = get_optional_target(opt_sim, _connection)
    if diner is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return False
    dining_groups = director.get_dining_groups_by_sim(diner)
    if not dining_groups:
        sims4.commands.output('Sim {} is not in dining group'.format(diner), _connection)
        sims4.commands.automation_output('RestaurantDinerState; Status:NotReady', _connection)
        return True
    dining_group = dining_groups.pop()
    # NOTE(review): only the state of the last sub-situation iterated is
    # reported — presumably they share one state; verify against the situation.
    for sub_situation in dining_group.sub_situations:
        state = sub_situation.current_state_index().name
    sims4.commands.automation_output('RestaurantDinerState; Status:{}'.format(state), _connection)
    return True
from protocolbuffers import Restaurant_pb2
from event_testing import test_events
from google.protobuf import text_format
from restaurants import restaurant_utils
from restaurants.chefs_choice import ChefsChoice
from restaurants.restaurant_diner_situation import DinerSubSituationState, RestaurantDinerSubSituation, RestaurantDinerBackGroundSituation
from restaurants.restaurant_order import OrderStatus, OrderRecommendationState, GroupOrder
from restaurants.restaurant_tuning import RestaurantTuning, RestaurantIngredientQualityType, get_restaurant_zone_director
from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam, get_optional_target
from sims import sim
from sims4.protocol_buffer_utils import has_field
import services, sims4.commands
@sims4.commands.Command('restaurant.order_food', command_type=(sims4.commands.CommandType.Live))
def order_food(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
if recipe_type is None:
sims4.commands.output('Recipe is None', _connection)
sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
return False
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
return False
zone_director.make_one_order(sim, recipe_type)
groups = zone_director.get_dining_groups_by_sim(sim)
if groups is None:
sims4.commands.output('Sim {} is not in dining group'.format(opt_sim), _connection)
sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
group = groups.pop()
group.hold_ordered_cost(recipe_type.restaurant_base_price)
sims4.commands.automation_output('RestaurantOrderFood; Status:Success', _connection)
return True
@sims4.commands.Command('restaurant.show_menu', command_type=(sims4.commands.CommandType.Live))
def show_menu(opt_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
zone_director.show_menu(sim)
@sims4.commands.Command('restaurant.show_menu_for_chef', command_type=(sims4.commands.CommandType.Live))
def show_menu_for_chef(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
return False
chef_sim = get_optional_target(chef_sim, _connection)
if chef_sim is None:
sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
return False
chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
if chef_situation is None:
sims4.commands.output("Couldn't find a Chef Situation in this zone.")
return False
chef_situation.show_menu(sim)
@sims4.commands.Command('restaurant.show_recommendation_menu_for_sim', command_type=(sims4.commands.CommandType.Live))
def show_recommendation_menu_for_sim(opt_sim: OptionalTargetParam=None, owner_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
zone_director.show_menu(sim, is_recommendation=True)
@sims4.commands.Command('restaurant.claim_table', command_type=(sims4.commands.CommandType.Live))
def claim_table(opt_sim: OptionalTargetParam=None, opt_table: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
return False
table_to_claim = get_optional_target(opt_table, _connection)
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
zone_director.claim_table(sim, table_to_claim)
@sims4.commands.Command('restaurant.order_for_table', command_type=(sims4.commands.CommandType.Live))
def order_for_table(sim_orders: str, _connection=None):
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
proto = Restaurant_pb2.SimOrders()
text_format.Merge(sim_orders, proto)
orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
sim = services.object_manager().get(orders[0][0])
if sim is None:
sims4.commands.output("Trying to order for a Sim that isn't on the lot", _connection)
return False
zone_director.order_for_table(orders)
groups = zone_director.get_dining_groups_by_sim(sim)
group = groups.pop()
group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
return True
@sims4.commands.Command('restaurant.comp_drinks_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_drinks_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
return False
groups = zone_director.get_dining_groups_by_sim(sim)
group = groups.pop()
group.order_course_for_group((ChefsChoice.DRINK_COURSE), complimentary=True)
return True
@sims4.commands.Command('restaurant.comp_desserts_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_desserts_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
return False
groups = zone_director.get_dining_groups_by_sim(sim)
group = groups.pop()
group.order_course_for_group((ChefsChoice.DESSERT_COURSE), complimentary=True)
return True
@sims4.commands.Command('restaurant.recommend_order_for_table', command_type=(sims4.commands.CommandType.Live))
def recommend_order_for_table(sim_orders: str, _connection=None):
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
proto = Restaurant_pb2.SimOrders()
text_format.Merge(sim_orders, proto)
orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
sims_in_order = set([services.object_manager().get(order_sim_id) for order_sim_id in [order[0] for order in orders]])
for sim in sims_in_order:
if sim is None:
sims4.commands.output("Trying to target order for a Sim that isn't on the lot", _connection)
return False
active_group_order = _get_active_group_order_for_dining_group(sim)
if active_group_order:
recipe_manager = services.get_instance_manager(sims4.resources.Types.RECIPE)
for order in orders:
recipe = recipe_manager.get(order[1])
recipes = GroupOrder.get_food_drink_recipe_id_tuple(recipe)
active_group_order.add_sim_order((order[0]), food_recipe_id=(recipes[0]), drink_recipe_id=(recipes[1]),
recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
order_status=(OrderStatus.ORDER_INIT))
else:
zone_director.order_for_table(orders, send_order=False,
recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
order_status=(OrderStatus.ORDER_INIT))
groups = zone_director.get_dining_groups_by_sim(sim)
group = groups.pop()
group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
for sim in sims_in_order:
zone_director.trigger_recommendation_interaction(services.get_active_sim(), sim)
return True
@sims4.commands.Command('restaurant.npc_accept_or_reject_recommendation', command_type=(sims4.commands.CommandType.Live))
def npc_accept_or_reject_recommendation(opt_sim: OptionalTargetParam=None, accept_recommendation: bool=True, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Current venue is not restaurant', _connection)
return False
group_order = zone_director.get_active_group_order_for_sim(sim.sim_id)
if group_order is None:
sims4.commands.output('Sim {} was not offered a recommendation.'.format(opt_sim), _connection)
return False
if accept_recommendation:
sim_order = group_order.get_sim_order(sim.sim_id)
if sim_order is not None:
sim_order.recommendation_state = OrderRecommendationState.RECOMMENDATION_ACCEPTED
else:
group_order.remove_sim_order(sim.sim_id)
food_recipe, drink_recipe = ChefsChoice.get_order_for_npc_sim(sim)
group_order.add_sim_order((sim.sim_id), food_recipe_id=(food_recipe.guid64),
drink_recipe_id=(drink_recipe.guid64),
recommendation_state=(OrderRecommendationState.RECOMMENDATION_REJECTED),
order_status=(OrderStatus.ORDER_INIT))
return True
@sims4.commands.Command('restaurant.order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def order_food_at_chef_station(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
if recipe_type is None:
sims4.commands.output('Recipe is None', _connection)
return False
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
return False
chef_situation = restaurant_utils.get_chef_situation()
if chef_situation is None:
sims4.commands.output("Couldn't find a Chef Situation in this zone.")
return False
chef_situation.add_direct_order(recipe_type, sim)
services.get_event_manager().process_event((test_events.TestEvent.RestaurantFoodOrdered), sim_info=(sim.sim_info))
return True
@sims4.commands.Command('restaurant.npc_order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_at_chef_station(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
return False
else:
chef_sim = get_optional_target(chef_sim, _connection)
if chef_sim is None:
sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
return False
chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
if chef_situation is None:
sims4.commands.output("Couldn't find a Chef Situation in this zone.")
return False
if chef_situation.menu_preset is not None:
food_order = ChefsChoice.get_order_for_npc_sim_with_menu(sim, chef_situation.menu_preset)
else:
food_order, _ = ChefsChoice.get_order_for_npc_sim(sim)
chef_situation.add_direct_order(food_order, sim)
services.get_event_manager().process_event((test_events.TestEvent.RestaurantFoodOrdered), sim_info=(sim.sim_info))
return True
@sims4.commands.Command('restaurant.give_chef_feedback', command_type=(sims4.commands.CommandType.Live))
def give_chef_feedback(to_chef_sim_id: OptionalTargetParam=None, from_sim_id: OptionalTargetParam=None, is_compliment: bool=True, waitstaff_sim_id: OptionalTargetParam=None, _connection=None):
from_sim = get_optional_target(from_sim_id, _connection)
if from_sim is None:
sims4.commands.output("From Sim {} doesn't exist.".format(from_sim_id), _connection)
return False
to_chef_sim = get_optional_target(to_chef_sim_id, _connection)
if to_chef_sim is None:
sims4.commands.output("To Chef Sim {} doesn't exist.".format(to_chef_sim_id), _connection)
return False
waitstaff_sim = get_optional_target(waitstaff_sim_id, _connection)
if waitstaff_sim is None:
sims4.commands.output("Waitstaff Sim {} doesn't exist.".format(waitstaff_sim_id), _connection)
return False
waitstaff_situation = restaurant_utils.get_waitstaff_situation(waitstaff_sim)
waitstaff_situation.give_chef_feedback(to_chef_sim, from_sim, is_compliment)
@sims4.commands.Command('restaurant.npc_order_food_from_waitstaff', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_from_waitstaff(opt_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Not currently on a restaurant lot so cannot place orders with the waitstaff for NPC groups.', _connection)
return False
active_group_order = _get_active_group_order_for_dining_group(sim)
dining_groups = zone_director.get_dining_groups_by_sim(sim)
for dining_group in dining_groups:
if not dining_group.order_for_table(active_group_order=active_group_order):
sims4.commands.output('Failed to place order for dining group.', _connection)
return False
return True
@sims4.commands.Command('restaurant.comp_order_for_sim', command_type=(sims4.commands.CommandType.Live))
def comp_order_for_sim(opt_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.Command("Sim {} doesn't exist.".format(opt_sim), _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.Command('Not currently on a restaurant lot.', _connection)
return False
business_manager = zone_director.business_manager
if business_manager is None:
sims4.commands.Command("The current zone doesn't have a business manager.", _connection)
return False
for group_order in zone_director.get_delivered_orders_for_sim(sim.id):
business_manager.comp_order_for_sim(group_order.get_sim_order(sim.id))
@sims4.commands.Command('restaurant.create_food_for_group_order_sim', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_sim(opt_sim: OptionalTargetParam=None, _connection=None):
sim = get_optional_target(opt_sim, _connection)
if sim is None:
sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
return False
zone_director = get_restaurant_zone_director()
if zone_director is None:
sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
return False
group_order = zone_director.get_active_group_order_for_sim(sim.id)
if group_order is None:
sims4.commands.output('There is no group order in for the passed in sim {}.'.format(sim), _connection)
return False
zone_director.create_food_for_group_order(group_order)
return True
@sims4.commands.Command('restaurant.create_food_for_group_order_table', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_table(table_id: OptionalTargetParam=None, _connection=None):
    """Console command: spawn the food for the active group order at a table.

    Bug fix: the "no group order" message referenced an undefined name `sim`
    (copy/paste from the per-Sim variant), raising NameError whenever the
    table had no active order; it now reports the table instead.
    """
    table = get_optional_target(table_id, _connection)
    if table is None:
        sims4.commands.output("Table {} doesn't exist.".format(table_id), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_table(table.id)
    if group_order is None:
        sims4.commands.output('There is no group order in for the passed in table {}.'.format(table), _connection)
        return False
    zone_director.create_food_for_group_order(group_order)
    return True
@sims4.commands.Command('restaurant.set_ingredient_quality', command_type=(sims4.commands.CommandType.Live))
def set_ingredient_quality(ingredient_quality: RestaurantIngredientQualityType, _connection=None):
    """Console command: set the ingredient quality of the zone's restaurant.

    Bug fix: the error message omitted the `_connection` argument that every
    other sims4.commands.output call in this module passes, so the message
    never reached the invoking console. Also returns an explicit status.
    """
    business_manager = services.business_service().get_business_manager_for_zone()
    if business_manager is None:
        sims4.commands.output('Trying to set the ingredient quality for a restaurant but there was no valid business manager found for the current zone.', _connection)
        return False
    business_manager.set_ingredient_quality(ingredient_quality)
    return True
@sims4.commands.Command('restaurant.expedite_sims_order', command_type=(sims4.commands.CommandType.Live))
def expedite_sim_order(opt_sim: OptionalTargetParam=None, _connection=None):
    """Console command: flag a Sim's pending group order as expedited."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return
    if not zone_director.has_group_order(sim.id):
        sims4.commands.output('Sim {} does not have an order.'.format(sim), _connection)
        return
    group_order = zone_director.get_group_order(sim.id)
    if group_order is None:
        return
    group_order.expedited = True
@sims4.commands.Command('restaurant.refresh_configuration', command_type=(sims4.commands.CommandType.Live))
def refresh_configuration(_connection=None):
    """Console command: make the restaurant zone director reload its configuration."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        return
    zone_director.refresh_configuration()
def _get_active_group_order_for_dining_group(sim):
    """Return the first active group order found among all members of the
    dining groups that *sim* belongs to, or None when there is none."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        return None
    for dining_group in zone_director.get_dining_groups_by_sim(sim):
        for member in dining_group.all_sims_in_situation_gen():
            order = zone_director.get_active_group_order_for_sim(member.sim_id)
            if order:
                return order
    return None
@sims4.commands.Command('restaurant.sim_is_employee', command_type=(sims4.commands.CommandType.Automation))
def sim_is_employee(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation command: report whether a Sim works at the current restaurant.

    Emits both a human-readable line and a machine-readable
    'RestaurantIsEmployee; Status:...' automation line.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("False, Sim {} doesn't exist.".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:InvalidSim', _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NotOnLot', _connection)
        return False
    situation_manager = services.get_zone_situation_manager()
    if situation_manager is None:
        sims4.commands.output('False, There is no situation manager on this lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NoSituationMgr', _connection)
        return False
    business_manager = zone_director.business_manager
    # Unowned restaurant (no business manager): infer employment from the
    # staff situations the Sim is currently in. Owned restaurant: ask the
    # business manager's employee roster directly (the elif below pairs with
    # the `if business_manager is None` check, not with the for loop).
    if business_manager is None:
        sim_situations = situation_manager.get_situations_sim_is_in(sim)
        for situation in sim_situations:
            if type(situation) in (RestaurantTuning.CHEF_SITUATION,
                                   RestaurantTuning.HOST_SITUATION,
                                   RestaurantTuning.WAITSTAFF_SITUATION):
                sims4.commands.output('True, Sim is an employee of the current restaurant.', _connection)
                sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
                return True
    elif business_manager.is_employee(sim.sim_info):
        sims4.commands.output('True, Sim is currently an employee', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
        return True
    sims4.commands.output('False, Sim is not an employee of the current restaurant.', _connection)
    sims4.commands.automation_output('RestaurantIsEmployee; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.is_open', command_type=(sims4.commands.CommandType.Automation))
def is_open(_connection=None):
    """Automation command: report whether the current restaurant is open.

    Unowned restaurants (no business manager) are always considered open.
    """
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:NotOnLot', _connection)
        return False
    business_manager = zone_director.business_manager
    if business_manager is None:
        sims4.commands.output('True, unowned restaurants are always open.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    if business_manager.is_open:
        sims4.commands.output('True, this owned restaurant is currently open', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    sims4.commands.output('False, this owned restaurant is currently closed', _connection)
    sims4.commands.automation_output('RestaurantIsOpen; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.get_sim_diner_state', command_type=(sims4.commands.CommandType.Automation))
def get_sim_dining_state(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation command: report the dining state of a Sim's dining group.

    Bug fix: `state` was only assigned inside the sub-situation loop, so a
    dining group with no sub-situations raised UnboundLocalError on the
    automation_output line. That case now reports Status:NotReady, matching
    the no-dining-group path.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return False
    groups = zone_director.get_dining_groups_by_sim(sim)
    if not groups:
        sims4.commands.output('Sim {} is not in dining group'.format(sim), _connection)
        sims4.commands.automation_output('RestaurantDinerState; Status:NotReady', _connection)
        return True
    dining_group = groups.pop()
    # The last sub-situation wins, mirroring the original loop's behavior.
    state = None
    for sub_situation in dining_group.sub_situations:
        state = sub_situation.current_state_index().name
    if state is None:
        sims4.commands.automation_output('RestaurantDinerState; Status:NotReady', _connection)
        return True
    sims4.commands.automation_output('RestaurantDinerState; Status:{}'.format(state), _connection)
    return True
f72fd45c61980e5c188d7f2e1db08ef2a024468b | 685 | py | Python | examples/custom_plugin/plugins/MyFirstPlugin/pwba_plugin.py | pxlc/PyWebBrowserApp | 0165b29cbe5f88068f62d8298b1f5e3ee611a985 | [
"MIT"
] | 1 | 2021-11-09T07:53:25.000Z | 2021-11-09T07:53:25.000Z | examples/custom_plugin/plugins/MyFirstPlugin/pwba_plugin.py | pxlc/PyWebBrowserApp | 0165b29cbe5f88068f62d8298b1f5e3ee611a985 | [
"MIT"
] | null | null | null | examples/custom_plugin/plugins/MyFirstPlugin/pwba_plugin.py | pxlc/PyWebBrowserApp | 0165b29cbe5f88068f62d8298b1f5e3ee611a985 | [
"MIT"
] | null | null | null |
from PyWebBrowserApp import PluginBase
from PyWebBrowserApp import register_plugin_op
class Plugin(PluginBase):
    """Example PyWebBrowserApp plugin showing Python <-> browser round trips."""

    def __init__(self):
        super(Plugin, self).__init__()
        # '${P}' is a template token replaced with the plugin name on install.
        self.name = '${P}'

    @register_plugin_op
    def test_plugin_callback(self, op_data):
        """Trivial op used to verify that plugin callbacks are wired up."""
        print('Hello from ${P} callback')

    @register_plugin_op
    def roundtrip_from_js(self, op_data):
        """Receive a message from the browser side and echo it back to JS."""
        alert_msg = op_data.get('alert_msg', '???')
        self.info('[Plugin "%s"] in roundtrip_from_js() method, got alert_msg "%s"' % (self.name, alert_msg))
        payload = {'alert_msg': alert_msg}
        self.plugin_to_webbrowser('roundtrip_from_python', payload)
| true | true |
f72fd4a32451e1afa48eb80e2147811dcd4f5f9f | 54,405 | py | Python | .kodi/addons/script.ftvguide/gui.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/addons/script.ftvguide/gui.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/addons/script.ftvguide/gui.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | 2 | 2018-04-17T17:34:39.000Z | 2020-07-26T03:43:33.000Z | #
# Copyright (C) 2014 Tommy Winther
# http://tommy.winther.nu
#
# Modified for FTV Guide (09/2014 onwards)
# by Thomas Geppert [bluezed] - bluezed.apps@gmail.com
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this Program; see the file LICENSE.txt. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import datetime
import threading
import time
import xbmc
import xbmcgui
import source as src
from notification import Notification
from strings import *
import streaming
# Set True to route verbose tracing through debug() below.
DEBUG = False
# Window interaction modes: browsing the guide, watching TV, or TV with OSD.
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'
# Kodi action ids (see xbmcgui action constants).
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15
ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107
# Raw key/button codes.
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
KEY_ESC = 61467
# Number of channel rows shown on one EPG page.
CHANNELS_PER_PAGE = 8
# One EPG time column covers 30 minutes.
HALF_HOUR = datetime.timedelta(minutes=30)
# Skin folder selected in the addon settings.
SKIN = ADDON.getSetting('skin')
def debug(s):
    """Log *s* at Kodi debug level when the module-wide DEBUG flag is on."""
    if not DEBUG:
        return
    xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
    """Mutable 2-D pixel coordinate used to remember the EPG focus position."""

    def __init__(self):
        self.x = 0
        self.y = 0

    def __repr__(self):
        return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
    """Pixel geometry of the on-screen EPG grid area."""

    def __init__(self):
        # All edges and sizes start at zero; onInit() fills them from the skin.
        for attr in ('top', 'left', 'right', 'bottom', 'width', 'cellHeight'):
            setattr(self, attr, 0)
class ControlAndProgram(object):
    """Pairs a skin button control with the program it displays."""

    def __init__(self, control, program):
        self.program = program
        self.control = control
class TVGuide(xbmcgui.WindowXML):
    """Main EPG window of the addon, backed by script-tvguide-main.xml.

    The C_* constants are control ids that must match the skin xml.
    """
    # Header labels and info panel.
    C_MAIN_DATE_LONG = 3999
    C_MAIN_DATE = 4000
    C_MAIN_TITLE = 4020
    C_MAIN_TIME = 4021
    C_MAIN_DESCRIPTION = 4022
    C_MAIN_IMAGE = 4023
    C_MAIN_LOGO = 4024
    C_MAIN_TIMEBAR = 4100
    # Loading overlay.
    C_MAIN_LOADING = 4200
    C_MAIN_LOADING_PROGRESS = 4201
    C_MAIN_LOADING_TIME_LEFT = 4202
    C_MAIN_LOADING_CANCEL = 4203
    # Mouse navigation buttons.
    C_MAIN_MOUSE_CONTROLS = 4300
    C_MAIN_MOUSE_HOME = 4301
    C_MAIN_MOUSE_LEFT = 4302
    C_MAIN_MOUSE_UP = 4303
    C_MAIN_MOUSE_DOWN = 4304
    C_MAIN_MOUSE_RIGHT = 4305
    C_MAIN_MOUSE_EXIT = 4306
    C_MAIN_BACKGROUND = 4600
    # EPG grid area; the view marker control defines its pixel bounds.
    C_MAIN_EPG = 5000
    C_MAIN_EPG_VIEW_MARKER = 5001
    # On-screen display shown over playback.
    C_MAIN_OSD = 6000
    C_MAIN_OSD_TITLE = 6001
    C_MAIN_OSD_TIME = 6002
    C_MAIN_OSD_DESCRIPTION = 6003
    C_MAIN_OSD_CHANNEL_LOGO = 6004
    C_MAIN_OSD_CHANNEL_TITLE = 6005
    def __new__(cls):
        # WindowXML subclasses must pass the xml filename, addon path and skin
        # folder to __new__ -- Kodi builds the window from these arguments.
        return super(TVGuide, cls).__new__(cls, 'script-tvguide-main.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self):
        """Initialize default state; the heavy lifting happens in onInit()."""
        super(TVGuide, self).__init__()
        self.notification = None
        self.redrawingEPG = False
        self.isClosing = False
        self.controlAndProgramList = list()
        self.ignoreMissingControlIds = list()
        self.channelIdx = 0
        self.focusPoint = Point()
        self.epgView = EPGView()
        self.streamingService = streaming.StreamsService(ADDON)
        self.player = xbmc.Player()
        self.database = None
        self.mode = MODE_EPG
        self.currentChannel = None
        # OSD only works with windowed playback, which alternative playback bypasses.
        self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting(
            'alternative.playback') != 'true'
        self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
        self.osdChannel = None
        self.osdProgram = None
        # find nearest half hour
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
def getControl(self, controlId):
try:
return super(TVGuide, self).getControl(controlId)
except:
if controlId in self.ignoreMissingControlIds:
return None
if not self.isClosing:
self.close()
return None
def close(self):
if not self.isClosing:
self.isClosing = True
if self.player.isPlaying():
self.player.stop()
if self.database:
self.database.close(super(TVGuide, self).close)
else:
super(TVGuide, self).close()
    def onInit(self):
        """Called by Kodi when the window is shown; sets up and draws the EPG."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
        self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)
        # The view-marker control in the skin defines the pixel area the
        # program grid is laid out in; cache its geometry.
        control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
        if control:
            left, top = control.getPosition()
            self.focusPoint.x = left
            self.focusPoint.y = top
            self.epgView.left = left
            self.epgView.top = top
            self.epgView.right = left + control.getWidth()
            self.epgView.bottom = top + control.getHeight()
            self.epgView.width = control.getWidth()
            self.epgView.cellHeight = control.getHeight() / CHANNELS_PER_PAGE
        if self.database:
            # Window re-opened with a live database: just redraw.
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        else:
            try:
                self.database = src.Database()
            except src.SourceNotConfiguredException:
                self.onSourceNotConfigured()
                self.close()
                return
            self.database.initialize(self.onSourceInitialized, self.isSourceInitializationCancelled)
        self.updateTimebar()
def onAction(self, action):
debug('Mode is: %s' % self.mode)
if self.mode == MODE_TV:
self.onActionTVMode(action)
elif self.mode == MODE_OSD:
self.onActionOSDMode(action)
elif self.mode == MODE_EPG:
self.onActionEPGMode(action)
def onActionTVMode(self, action):
if action.getId() == ACTION_PAGE_UP:
self._channelUp()
elif action.getId() == ACTION_PAGE_DOWN:
self._channelDown()
elif not self.osdEnabled:
pass # skip the rest of the actions
elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif action.getId() == ACTION_SHOW_INFO:
self._showOsd()
    def onActionOSDMode(self, action):
        """Handle input while the on-screen display is visible.

        Up/down browse channels, left/right browse programs on the OSD
        channel; select tunes the OSD channel; back/info dismiss the OSD.
        """
        if action.getId() == ACTION_SHOW_INFO:
            self._hideOsd()
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            self._hideOsd()
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SELECT_ITEM:
            # Tune to the channel currently shown in the OSD.
            if self.playChannel(self.osdChannel):
                self._hideOsd()
        elif action.getId() == ACTION_PAGE_UP:
            self._channelUp()
            self._showOsd()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
            self._showOsd()
        elif action.getId() == ACTION_UP:
            self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_DOWN:
            self.osdChannel = self.database.getNextChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_LEFT:
            previousProgram = self.database.getPreviousProgram(self.osdProgram)
            if previousProgram:
                self.osdProgram = previousProgram
                self._showOsd()
        elif action.getId() == ACTION_RIGHT:
            nextProgram = self.database.getNextProgram(self.osdProgram)
            if nextProgram:
                self.osdProgram = nextProgram
                self._showOsd()
    def onActionEPGMode(self, action):
        """Handle input while the EPG grid is shown (navigation and selection)."""
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        # catch the ESC key
        elif action.getId() == ACTION_PREVIOUS_MENU and action.getButtonCode() == KEY_ESC:
            self.close()
            return
        elif action.getId() == ACTION_MOUSE_MOVE:
            self._showControl(self.C_MAIN_MOUSE_CONTROLS)
            return
        elif action.getId() == KEY_CONTEXT_MENU:
            if self.player.isPlaying():
                self._hideEpg()
        # Determine the focus point to navigate from: either the center of
        # the focused program button or the last remembered focus point.
        controlInFocus = None
        currentFocus = self.focusPoint
        try:
            controlInFocus = self.getFocus()
            if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
                (left, top) = controlInFocus.getPosition()
                currentFocus = Point()
                currentFocus.x = left + (controlInFocus.getWidth() / 2)
                currentFocus.y = top + (controlInFocus.getHeight() / 2)
        except Exception:
            # No control currently has focus; re-focus something sensible.
            control = self._findControlAt(self.focusPoint)
            if control is None and len(self.controlAndProgramList) > 0:
                control = self.controlAndProgramList[0].control
            if control is not None:
                self.setFocus(control)
            return
        if action.getId() == ACTION_LEFT:
            self._left(currentFocus)
        elif action.getId() == ACTION_RIGHT:
            self._right(currentFocus)
        elif action.getId() == ACTION_UP:
            self._up(currentFocus)
        elif action.getId() == ACTION_DOWN:
            self._down(currentFocus)
        elif action.getId() == ACTION_NEXT_ITEM:
            self._nextDay()
        elif action.getId() == ACTION_PREV_ITEM:
            self._previousDay()
        elif action.getId() == ACTION_PAGE_UP:
            self._moveUp(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_PAGE_DOWN:
            self._moveDown(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_MOUSE_WHEEL_UP:
            self._moveUp(scrollEvent=True)
        elif action.getId() == ACTION_MOUSE_WHEEL_DOWN:
            self._moveDown(scrollEvent=True)
        elif action.getId() == KEY_HOME:
            # Jump back to 'now', snapped to the previous half hour.
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                     seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() in [KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU] and controlInFocus is not None:
            program = self._getProgramFromControl(controlInFocus)
            if program is not None:
                self._showContextMenu(program)
        else:
            xbmc.log('[script.ftvguide] Unhandled ActionId: ' + str(action.getId()), xbmc.LOGDEBUG)
    def onClick(self, controlId):
        """Handle clicks: mouse navigation buttons or a program cell (tune/stream setup)."""
        if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
            self.close()
            return
        if self.isClosing:
            return
        # Mouse navigation buttons.
        if controlId == self.C_MAIN_MOUSE_HOME:
            # Jump back to 'now', snapped to the previous half hour.
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30, seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_LEFT:
            self.viewStartDate -= datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_UP:
            self._moveUp(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_DOWN:
            self._moveDown(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_RIGHT:
            self.viewStartDate += datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        # Otherwise the click was (possibly) on a program cell.
        program = self._getProgramFromControl(self.getControl(controlId))
        if program is None:
            return
        if not self.playChannel(program.channel):
            # No stream url known for this channel yet; try to detect one.
            result = self.streamingService.detectStream(program.channel)
            if not result:
                # could not detect stream, show context menu
                self._showContextMenu(program)
            elif type(result) == str:
                # one single stream detected, save it and start streaming
                self.database.setCustomStreamUrl(program.channel, result)
                self.playChannel(program.channel)
            else:
                # multiple matches, let user decide
                d = ChooseStreamAddonDialog(result)
                d.doModal()
                if d.stream is not None:
                    self.database.setCustomStreamUrl(program.channel, d.stream)
                    self.playChannel(program.channel)
    def _showContextMenu(self, program):
        """Show the popup menu for *program* and act on the chosen entry."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
        d = PopupMenu(self.database, program, not program.notificationScheduled)
        d.doModal()
        buttonClicked = d.buttonClicked
        del d
        if buttonClicked == PopupMenu.C_POPUP_REMIND:
            # Toggle the reminder for this program and redraw to update coloring.
            if program.notificationScheduled:
                self.notification.removeNotification(program)
            else:
                self.notification.addNotification(program)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
            d = StreamSetupDialog(self.database, program.channel)
            d.doModal()
            del d
        elif buttonClicked == PopupMenu.C_POPUP_PLAY:
            self.playChannel(program.channel)
        elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
            d = ChannelsMenu(self.database)
            d.doModal()
            del d
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_QUIT:
            self.close()
        elif buttonClicked == PopupMenu.C_POPUP_LIBMOV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://movies/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_LIBTV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://tvshows/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_VIDEOADDONS:
            xbmc.executebuiltin('ActivateWindow(Videos,addons://sources/video/)')
def setFocusId(self, controlId):
control = self.getControl(controlId)
if control:
self.setFocus(control)
    def setFocus(self, control):
        """Focus *control* and, for program cells, remember its position as the
        sticky focus point used when navigating between EPG pages."""
        debug('setFocus %d' % control.getId())
        if control in [elem.control for elem in self.controlAndProgramList]:
            debug('Focus before %s' % self.focusPoint)
            (left, top) = control.getPosition()
            # Keep the previous x when it still falls inside the control, so
            # that moving up/down preserves the horizontal position.
            if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
                self.focusPoint.x = left
            self.focusPoint.y = top + (control.getHeight() / 2)
            debug('New focus at %s' % self.focusPoint)
        super(TVGuide, self).setFocus(control)
    def onFocus(self, controlId):
        """Update the info panel (title, time, description, images) for the
        program cell that just received focus."""
        try:
            controlInFocus = self.getControl(controlId)
        except Exception:
            return
        program = self._getProgramFromControl(controlInFocus)
        if program is None:
            return
        self.setControlLabel(self.C_MAIN_TITLE, '[B]%s[/B]' % program.title)
        if program.startDate or program.endDate:
            self.setControlLabel(self.C_MAIN_TIME,
                                 '[B]%s - %s[/B]' % (self.formatTime(program.startDate), self.formatTime(program.endDate)))
        else:
            self.setControlLabel(self.C_MAIN_TIME, '')
        if program.description:
            description = program.description
        else:
            description = strings(NO_DESCRIPTION)
        self.setControlText(self.C_MAIN_DESCRIPTION, description)
        if program.channel.logo is not None:
            self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
        else:
            self.setControlImage(self.C_MAIN_LOGO, '')
        if program.imageSmall is not None:
            self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
        else:
            # Fall back to the addon logo when the program has no image.
            self.setControlImage(self.C_MAIN_IMAGE, 'tvguide-logo-epg.png')
        if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
            self.setControlImage(self.C_MAIN_BACKGROUND, program.imageLarge)
        if not self.osdEnabled and self.player.isPlaying():
            # Without OSD support, background playback is stopped while browsing.
            self.player.stop()
def _left(self, currentFocus):
control = self._findControlOnLeft(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate -= datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.right
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)
def _right(self, currentFocus):
control = self._findControlOnRight(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate += datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.left
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)
def _up(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlAbove(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlAbove)
def _down(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlBelow(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlBelow)
def _nextDay(self):
self.viewStartDate += datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _previousDay(self):
self.viewStartDate -= datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _moveUp(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction=self._findControlAbove)
def _moveDown(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)
def _channelUp(self):
channel = self.database.getNextChannel(self.currentChannel)
self.playChannel(channel)
def _channelDown(self):
channel = self.database.getPreviousChannel(self.currentChannel)
self.playChannel(channel)
    def playChannel(self, channel):
        """Start streaming *channel*; returns True if a stream url was known."""
        self.currentChannel = channel
        wasPlaying = self.player.isPlaying()
        url = self.database.getStreamUrl(channel)
        if url:
            # plugin:// urls must go through the builtin player commands;
            # direct urls can be played windowed for the OSD.
            if url[0:9] == 'plugin://':
                if self.alternativePlayback:
                    xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
                elif self.osdEnabled:
                    xbmc.executebuiltin('PlayMedia(%s,1)' % url)
                else:
                    xbmc.executebuiltin('PlayMedia(%s)' % url)
            else:
                self.player.play(item=url, windowed=self.osdEnabled)
            if not wasPlaying:
                self._hideEpg()
        # Watchdog that redraws the EPG once playback stops (or never starts).
        threading.Timer(1, self.waitForPlayBackStopped).start()
        self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
        return url is not None
    def waitForPlayBackStopped(self):
        """Background watchdog: wait (up to ~10s) for playback to start, then
        poll until it stops and notify onPlayBackStopped()."""
        for retry in range(0, 100):
            time.sleep(0.1)
            if self.player.isPlaying():
                break
        while self.player.isPlaying() and not xbmc.abortRequested and not self.isClosing:
            time.sleep(0.5)
        self.onPlayBackStopped()
    def _showOsd(self):
        """Populate and show the on-screen display for the OSD channel/program."""
        if not self.osdEnabled:
            return
        if self.mode != MODE_OSD:
            # First time the OSD comes up: start browsing from the tuned channel.
            self.osdChannel = self.currentChannel
        if self.osdProgram is not None:
            self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
            if self.osdProgram.startDate or self.osdProgram.endDate:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '[B]%s - %s[/B]' % (
                    self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate)))
            else:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '')
            self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
            self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
            if self.osdProgram.channel.logo is not None:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, self.osdProgram.channel.logo)
            else:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')
        self.mode = MODE_OSD
        self._showControl(self.C_MAIN_OSD)
def _hideOsd(self):
self.mode = MODE_TV
self._hideControl(self.C_MAIN_OSD)
def _hideEpg(self):
self._hideControl(self.C_MAIN_EPG)
self.mode = MODE_TV
self._clearEpg()
    def onRedrawEPG(self, channelStart, startTime, focusFunction=None):
        """Rebuild the whole EPG grid for the page starting at *channelStart*
        and the two-hour window starting at *startTime*.

        focusFunction picks which program cell to focus afterwards; defaults
        to the cell at the remembered focus point.
        """
        if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
            debug('onRedrawEPG - already redrawing')
            return  # ignore redraw request while redrawing
        debug('onRedrawEPG')
        self.redrawingEPG = True
        self.mode = MODE_EPG
        self._showControl(self.C_MAIN_EPG)
        self.updateTimebar(scheduleTimer=False)
        # show Loading screen
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
        self._showControl(self.C_MAIN_LOADING)
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)
        # remove existing controls
        self._clearEpg()
        try:
            self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, self.onSourceProgressUpdate, clearExistingProgramList=False)
        except src.SourceException:
            self.onEPGLoadError()
            return
        # Channels still in this list after the program loop get a filler cell.
        channelsWithoutPrograms = list(channels)
        # date and time row
        self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate, False))
        self.setControlLabel(self.C_MAIN_DATE_LONG, self.formatDate(self.viewStartDate, True))
        for col in range(1, 5):
            self.setControlLabel(4000 + col, self.formatTime(startTime))
            startTime += HALF_HOUR
        if programs is None:
            self.onEPGLoadError()
            return
        # set channel logo or text
        showLogo = ADDON.getSetting('logos.enabled') == 'true'
        for idx in range(0, CHANNELS_PER_PAGE):
            if idx >= len(channels):
                self.setControlImage(4110 + idx, ' ')
                self.setControlLabel(4010 + idx, ' ')
            else:
                channel = channels[idx]
                self.setControlLabel(4010 + idx, channel.title)
                if (channel.logo is not None and showLogo == True):
                    self.setControlImage(4110 + idx, channel.logo)
                else:
                    self.setControlImage(4110 + idx, ' ')
        # Build one button per program, clipped to the visible window.
        for program in programs:
            idx = channels.index(program.channel)
            if program.channel in channelsWithoutPrograms:
                channelsWithoutPrograms.remove(program.channel)
            startDelta = program.startDate - self.viewStartDate
            stopDelta = program.endDate - self.viewStartDate
            cellStart = self._secondsToXposition(startDelta.seconds)
            if startDelta.days < 0:
                # Program started before the window: pin it to the left edge.
                cellStart = self.epgView.left
            cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
            if cellStart + cellWidth > self.epgView.right:
                cellWidth = self.epgView.right - cellStart
            if cellWidth > 1:
                # Red texture marks programs with a scheduled reminder.
                if program.notificationScheduled:
                    noFocusTexture = 'tvguide-program-red.png'
                    focusTexture = 'tvguide-program-red-focus.png'
                else:
                    noFocusTexture = 'tvguide-program-grey.png'
                    focusTexture = 'tvguide-program-grey-focus.png'
                if cellWidth < 25:
                    title = ''  # Text will overflow outside the button if it is too narrow
                else:
                    title = program.title
                control = xbmcgui.ControlButton(
                    cellStart,
                    self.epgView.top + self.epgView.cellHeight * idx,
                    cellWidth - 2,
                    self.epgView.cellHeight - 2,
                    title,
                    noFocusTexture=noFocusTexture,
                    focusTexture=focusTexture
                )
                self.controlAndProgramList.append(ControlAndProgram(control, program))
        # Full-width placeholder cell for channels that had no program data.
        for channel in channelsWithoutPrograms:
            idx = channels.index(channel)
            control = xbmcgui.ControlButton(
                self.epgView.left,
                self.epgView.top + self.epgView.cellHeight * idx,
                (self.epgView.right - self.epgView.left) - 2,
                self.epgView.cellHeight - 2,
                strings(NO_PROGRAM_AVAILABLE),
                noFocusTexture='tvguide-program-grey.png',
                focusTexture='tvguide-program-grey-focus.png'
            )
            program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), None, None, None)
            self.controlAndProgramList.append(ControlAndProgram(control, program))
        # add program controls
        if focusFunction is None:
            focusFunction = self._findControlAt
        focusControl = focusFunction(self.focusPoint)
        controls = [elem.control for elem in self.controlAndProgramList]
        self.addControls(controls)
        if focusControl is not None:
            debug('onRedrawEPG - setFocus %d' % focusControl.getId())
            self.setFocus(focusControl)
        self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
        if focusControl is None and len(self.controlAndProgramList) > 0:
            self.setFocus(self.controlAndProgramList[0].control)
        self._hideControl(self.C_MAIN_LOADING)
        self.redrawingEPG = False
def _clearEpg(self):
controls = [elem.control for elem in self.controlAndProgramList]
try:
self.removeControls(controls)
except RuntimeError:
for elem in self.controlAndProgramList:
try:
self.removeControl(elem.control)
except RuntimeError:
pass # happens if we try to remove a control that doesn't exist
del self.controlAndProgramList[:]
def onEPGLoadError(self):
self.redrawingEPG = False
self._hideControl(self.C_MAIN_LOADING)
xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2))
self.close()
def onSourceNotConfigured(self):
self.redrawingEPG = False
self._hideControl(self.C_MAIN_LOADING)
xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
self.close()
def isSourceInitializationCancelled(self):
return xbmc.abortRequested or self.isClosing
def onSourceInitialized(self, success):
if success:
self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
self.onRedrawEPG(0, self.viewStartDate)
    def onSourceProgressUpdate(self, percentageComplete):
        """Update the loading progress bar and the estimated-time-left label.

        @param percentageComplete: progress in percent (0-100)
        @return: False to cancel the update when Kodi aborts or the window closes
        """
        control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
        if percentageComplete < 1:
            # Start of a new update: reset the bar and remember the start time
            # so the remaining time can be estimated later.
            if control:
                control.setPercent(1)
            self.progressStartTime = datetime.datetime.now()
            self.progressPreviousPercentage = percentageComplete
        elif percentageComplete != self.progressPreviousPercentage:
            if control:
                control.setPercent(percentageComplete)
            self.progressPreviousPercentage = percentageComplete
            delta = datetime.datetime.now() - self.progressStartTime
            if percentageComplete < 20:
                # Too early for a stable estimate.
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
            else:
                # Linear extrapolation from elapsed time and progress so far.
                secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
                if secondsLeft > 30:
                    # Round down to 10s steps so the label does not flicker.
                    secondsLeft -= secondsLeft % 10
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)
        return not xbmc.abortRequested and not self.isClosing
    def onPlayBackStopped(self):
        """Player callback: when playback ends, hide the OSD and return to the EPG grid."""
        if not self.player.isPlaying() and not self.isClosing:
            self._hideControl(self.C_MAIN_OSD)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _secondsToXposition(self, seconds):
return self.epgView.left + (seconds * self.epgView.width / 7200)
def _findControlOnRight(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x < x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlOnLeft(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x > x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlBelow(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y < y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] > top):
nearestControl = control
return nearestControl
def _findControlAbove(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y > y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] < top):
nearestControl = control
return nearestControl
def _findControlAt(self, point):
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
bottom = top + control.getHeight()
right = left + control.getWidth()
if left <= point.x <= right and top <= point.y <= bottom:
return control
return None
def _getProgramFromControl(self, control):
for elem in self.controlAndProgramList:
if elem.control == control:
return elem.program
return None
def _hideControl(self, *controlIds):
"""
Visibility is inverted in skin
"""
for controlId in controlIds:
control = self.getControl(controlId)
if control:
control.setVisible(True)
def _showControl(self, *controlIds):
"""
Visibility is inverted in skin
"""
for controlId in controlIds:
control = self.getControl(controlId)
if control:
control.setVisible(False)
def formatTime(self, timestamp):
if timestamp:
format = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
return timestamp.strftime(format)
else:
return ''
def formatDate(self, timestamp, longdate=False):
if timestamp:
if longdate == True:
format = xbmc.getRegion('datelong')
else:
format = xbmc.getRegion('dateshort')
return timestamp.strftime(format)
else:
return ''
def setControlImage(self, controlId, image):
control = self.getControl(controlId)
if control:
control.setImage(image.encode('utf-8'))
def setControlLabel(self, controlId, label):
control = self.getControl(controlId)
if control and label:
control.setLabel(label)
def setControlText(self, controlId, text):
control = self.getControl(controlId)
if control:
control.setText(text)
def updateTimebar(self, scheduleTimer=True):
# move timebar to current time
timeDelta = datetime.datetime.today() - self.viewStartDate
control = self.getControl(self.C_MAIN_TIMEBAR)
if control:
(x, y) = control.getPosition()
try:
# Sometimes raises:
# exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
control.setVisible(timeDelta.days == 0)
except:
pass
control.setPosition(self._secondsToXposition(timeDelta.seconds), y)
if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
threading.Timer(1, self.updateTimebar).start()
class PopupMenu(xbmcgui.WindowXMLDialog):
    """Context menu dialog for a program: play, stream setup, reminders, etc.

    The caller reads `buttonClicked` after doModal() to learn which button
    (if any) was selected.
    """
    # Control ids; must match script-tvguide-menu.xml in the skin.
    C_POPUP_PLAY = 4000
    C_POPUP_CHOOSE_STREAM = 4001
    C_POPUP_REMIND = 4002
    C_POPUP_CHANNELS = 4003
    C_POPUP_QUIT = 4004
    C_POPUP_CHANNEL_LOGO = 4100
    C_POPUP_CHANNEL_TITLE = 4101
    C_POPUP_PROGRAM_TITLE = 4102
    C_POPUP_LIBMOV = 80000
    C_POPUP_LIBTV = 80001
    C_POPUP_VIDEOADDONS = 80002
    def __new__(cls, database, program, showRemind):
        # WindowXMLDialog is constructed with (xml file, addon path, skin).
        return super(PopupMenu, cls).__new__(cls, 'script-tvguide-menu.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, program, showRemind):
        """
        @type database: source.Database
        @param program: the program the menu was opened for
        @type program: source.Program
        @param showRemind: True to offer adding a reminder, False to offer removal
        """
        super(PopupMenu, self).__init__()
        self.database = database
        self.program = program
        self.showRemind = showRemind
        # Set by onClick; None when the dialog was dismissed without a choice.
        self.buttonClicked = None
    def onInit(self):
        """Populate labels/images and enable or disable buttons for the program."""
        playControl = self.getControl(self.C_POPUP_PLAY)
        remindControl = self.getControl(self.C_POPUP_REMIND)
        channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
        channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
        programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)
        playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
        if not self.program.channel.isPlayable():
            # No stream known for this channel; steer the user to stream setup.
            playControl.setEnabled(False)
            self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
        if self.database.getCustomStreamUrl(self.program.channel):
            # A custom stream exists, so the button becomes "remove".
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(REMOVE_STRM_FILE))
        if self.program.channel.logo is not None:
            # Prefer the channel logo; fall back to the textual title.
            channelLogoControl.setImage(self.program.channel.logo)
            channelTitleControl.setVisible(False)
        else:
            channelTitleControl.setLabel(self.program.channel.title)
            channelLogoControl.setVisible(False)
        programTitleControl.setLabel(self.program.title)
        if self.program.startDate:
            remindControl.setEnabled(True)
            if self.showRemind:
                remindControl.setLabel(strings(REMIND_PROGRAM))
            else:
                remindControl.setLabel(strings(DONT_REMIND_PROGRAM))
        else:
            # Reminders need a start time.
            remindControl.setEnabled(False)
    def onAction(self, action):
        """Close the dialog on back/context/previous-menu actions."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
    def onClick(self, controlId):
        """Handle a button press; record the choice and close, except for
        in-place removal of a custom stream URL."""
        if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
            # "Remove strm file" mode: delete the mapping and stay open.
            self.database.deleteCustomStreamUrl(self.program.channel)
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(CHOOSE_STRM_FILE))
            if not self.program.channel.isPlayable():
                playControl = self.getControl(self.C_POPUP_PLAY)
                playControl.setEnabled(False)
        else:
            self.buttonClicked = controlId
            self.close()
    def onFocus(self, controlId):
        pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
    """Dialog for toggling channel visibility and reordering channels.

    Changes are only persisted when the Save button is clicked.
    """
    # Control ids; must match script-tvguide-channels.xml in the skin.
    C_CHANNELS_LIST = 6000
    C_CHANNELS_SELECTION_VISIBLE = 6001
    C_CHANNELS_SELECTION = 6002
    C_CHANNELS_SAVE = 6003
    C_CHANNELS_CANCEL = 6004
    def __new__(cls, database):
        # WindowXMLDialog is constructed with (xml file, addon path, skin).
        return super(ChannelsMenu, cls).__new__(cls, 'script-tvguide-channels.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database):
        """
        @type database: source.Database
        """
        super(ChannelsMenu, self).__init__()
        self.database = database
        # Full channel list, including hidden channels.
        self.channelList = database.getChannelList(onlyVisible=False)
        # Guards against re-entrant swaps while the list redraws.
        self.swapInProgress = False
        # Index of the channel picked for move mode.
        self.selectedChannel = 0
    def onInit(self):
        self.updateChannelList()
        self.setFocusId(self.C_CHANNELS_LIST)
    def onAction(self, action):
        """Handle navigation: enter/leave "move channel" mode and move items."""
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU, ACTION_LEFT]:
            # Enter move mode: remember the selected channel and switch focus
            # to the floating selection button.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.selectedChannel = idx
            buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
            buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
            self.setFocusId(self.C_CHANNELS_SELECTION)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
            # Confirm the move and return to the list.
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU]:
            # Cancel move mode: swap back to the originally selected position.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.swapChannels(self.selectedChannel, idx)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx > 0:
                self.swapChannels(idx, idx - 1)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx < listControl.size() - 1:
                self.swapChannels(idx, idx + 1)
    def onClick(self, controlId):
        """Toggle visibility on list click; save or cancel on the buttons."""
        if controlId == self.C_CHANNELS_LIST:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            item = listControl.getSelectedItem()
            channel = self.channelList[int(item.getProperty('idx'))]
            channel.visible = not channel.visible
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item.setIconImage(iconImage)
        elif controlId == self.C_CHANNELS_SAVE:
            # Persist and close via the database callback.
            self.database.saveChannelList(self.close, self.channelList)
        elif controlId == self.C_CHANNELS_CANCEL:
            self.close()
    def onFocus(self, controlId):
        pass
    def updateChannelList(self):
        """Rebuild the list control from self.channelList."""
        listControl = self.getControl(self.C_CHANNELS_LIST)
        listControl.reset()
        for idx, channel in enumerate(self.channelList):
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item = xbmcgui.ListItem('%3d. %s' % (idx + 1, channel.title), iconImage=iconImage)
            # Keep the model index on the item so clicks map back to channelList.
            item.setProperty('idx', str(idx))
            listControl.addItem(item)
    def updateListItem(self, idx, item):
        """Refresh a single list item to match channelList[idx]."""
        channel = self.channelList[idx]
        item.setLabel('%3d. %s' % (idx + 1, channel.title))
        if channel.visible:
            iconImage = 'tvguide-channel-visible.png'
        else:
            iconImage = 'tvguide-channel-hidden.png'
        item.setIconImage(iconImage)
        item.setProperty('idx', str(idx))
    def swapChannels(self, fromIdx, toIdx):
        """Swap two channels in the model, recompute weights, and update the UI."""
        if self.swapInProgress:
            return
        self.swapInProgress = True
        c = self.channelList[fromIdx]
        self.channelList[fromIdx] = self.channelList[toIdx]
        self.channelList[toIdx] = c
        # recalculate weight
        for idx, channel in enumerate(self.channelList):
            channel.weight = idx
        listControl = self.getControl(self.C_CHANNELS_LIST)
        self.updateListItem(fromIdx, listControl.getListItem(fromIdx))
        self.updateListItem(toIdx, listControl.getListItem(toIdx))
        listControl.selectItem(toIdx)
        # Small debounce so repeated up/down actions do not overlap.
        xbmc.sleep(50)
        self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
    """Dialog for assigning a stream to a channel.

    Three tabs are offered: a .strm file browser, Kodi favourites, and
    streams provided by installed addons.  A preview button plays the
    currently selected stream in a window.
    """
    # Tab buttons.
    C_STREAM_STRM_TAB = 101
    C_STREAM_FAVOURITES_TAB = 102
    C_STREAM_ADDONS_TAB = 103
    # .strm tab controls.
    C_STREAM_STRM_BROWSE = 1001
    C_STREAM_STRM_FILE_LABEL = 1005
    C_STREAM_STRM_PREVIEW = 1002
    C_STREAM_STRM_OK = 1003
    C_STREAM_STRM_CANCEL = 1004
    # Favourites tab controls.
    C_STREAM_FAVOURITES = 2001
    C_STREAM_FAVOURITES_PREVIEW = 2002
    C_STREAM_FAVOURITES_OK = 2003
    C_STREAM_FAVOURITES_CANCEL = 2004
    # Addons tab controls.
    C_STREAM_ADDONS = 3001
    C_STREAM_ADDONS_STREAMS = 3002
    C_STREAM_ADDONS_NAME = 3003
    C_STREAM_ADDONS_DESCRIPTION = 3004
    C_STREAM_ADDONS_PREVIEW = 3005
    C_STREAM_ADDONS_OK = 3006
    C_STREAM_ADDONS_CANCEL = 3007
    # Hidden label holding which tab is active (see VISIBLE_* values).
    C_STREAM_VISIBILITY_MARKER = 100
    VISIBLE_STRM = 'strm'
    VISIBLE_FAVOURITES = 'favourites'
    VISIBLE_ADDONS = 'addons'
    def __new__(cls, database, channel):
        # WindowXMLDialog is constructed with (xml file, addon path, skin).
        return super(StreamSetupDialog, cls).__new__(cls, 'script-tvguide-streamsetup.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, channel):
        """
        @type database: source.Database
        @type channel:source.Channel
        """
        super(StreamSetupDialog, self).__init__()
        self.database = database
        self.channel = channel
        self.player = xbmc.Player()
        # Cache of the addon last shown so the detail pane is not redrawn needlessly.
        self.previousAddonId = None
        # Path chosen on the .strm tab, if any.
        self.strmFile = None
        self.streamingService = streaming.StreamsService(ADDON)
    def close(self):
        """Stop any running preview before closing the dialog."""
        if self.player.isPlaying():
            self.player.stop()
        super(StreamSetupDialog, self).close()
    def onInit(self):
        """Fill the favourites and addon lists."""
        self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        favourites = self.streamingService.loadFavourites()
        items = list()
        for label, value in favourites:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', value)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
        listControl.addItems(items)
        items = list()
        for id in self.streamingService.getAddons():
            try:
                addon = xbmcaddon.Addon(id)  # raises Exception if addon is not installed
                item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
                item.setProperty('addon_id', id)
                items.append(item)
            except Exception:
                pass
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
        listControl.addItems(items)
        self.updateAddonInfo()
    def onAction(self, action):
        """Close on back/context actions; refresh the detail pane while browsing addons."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
        elif self.getFocusId() == self.C_STREAM_ADDONS:
            self.updateAddonInfo()
    def onClick(self, controlId):
        """Handle browse/OK/cancel/preview clicks on all three tabs."""
        if controlId == self.C_STREAM_STRM_BROWSE:
            stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', '.strm')
            if stream:
                self.database.setCustomStreamUrl(self.channel, stream)
                self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
                self.strmFile = stream
        elif controlId == self.C_STREAM_ADDONS_OK:
            listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_FAVOURITES_OK:
            listControl = self.getControl(self.C_STREAM_FAVOURITES)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_STRM_OK:
            self.database.setCustomStreamUrl(self.channel, self.strmFile)
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL]:
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW]:
            if self.player.isPlaying():
                # Preview button toggles: stop and restore all button labels.
                self.player.stop()
                self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                return
            # Pick the stream from whichever tab is active.
            stream = None
            visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
            if visible == self.VISIBLE_ADDONS:
                listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_FAVOURITES:
                listControl = self.getControl(self.C_STREAM_FAVOURITES)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_STRM:
                stream = self.strmFile
            if stream is not None:
                self.player.play(item=stream, windowed=True)
                if self.player.isPlaying():
                    self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))
    def onFocus(self, controlId):
        """Track the active tab in the hidden visibility marker label."""
        if controlId == self.C_STREAM_STRM_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        elif controlId == self.C_STREAM_FAVOURITES_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_FAVOURITES)
        elif controlId == self.C_STREAM_ADDONS_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_ADDONS)
    def updateAddonInfo(self):
        """Show name/description/streams for the addon selected in the addon list."""
        listControl = self.getControl(self.C_STREAM_ADDONS)
        item = listControl.getSelectedItem()
        if item is None:
            return
        if item.getProperty('addon_id') == self.previousAddonId:
            return  # already showing this addon
        self.previousAddonId = item.getProperty('addon_id')
        addon = xbmcaddon.Addon(id=item.getProperty('addon_id'))
        self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
        self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
        streams = self.streamingService.getAddonStreams(item.getProperty('addon_id'))
        items = list()
        for (label, stream) in streams:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', stream)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
        listControl.reset()
        listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
    """Dialog letting the user pick one stream from several addon-provided options.

    After doModal(), `self.stream` holds the chosen stream URL or None.
    """
    # Control id; must match script-tvguide-streamaddon.xml in the skin.
    C_SELECTION_LIST = 1000
    def __new__(cls, addons):
        # WindowXMLDialog is constructed with (xml file, addon path, skin).
        return super(ChooseStreamAddonDialog, cls).__new__(cls, 'script-tvguide-streamaddon.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, addons):
        """
        @param addons: list of (addon_id, label, stream_url) tuples to offer
        """
        super(ChooseStreamAddonDialog, self).__init__()
        self.addons = addons
        # Chosen stream URL; None until the user picks one.
        self.stream = None
    def onInit(self):
        """Fill the selection list with one item per candidate stream."""
        items = list()
        for id, label, url in self.addons:
            addon = xbmcaddon.Addon(id)
            item = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
            item.setProperty('stream', url)
            items.append(item)
        listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
        listControl.addItems(items)
        self.setFocus(listControl)
    def onAction(self, action):
        """Close without a selection on back/previous-menu actions."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK]:
            self.close()
    def onClick(self, controlId):
        """Record the selected stream and close."""
        if controlId == ChooseStreamAddonDialog.C_SELECTION_LIST:
            listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
            self.stream = listControl.getSelectedItem().getProperty('stream')
            self.close()
    def onFocus(self, controlId):
        pass
| 38.944166 | 161 | 0.614282 |
import datetime
import threading
import time
import xbmc
import xbmcgui
import source as src
from notification import Notification
from strings import *
import streaming
# Enable verbose debug logging via debug().
DEBUG = False
# Window modes: EPG grid, fullscreen TV, or on-screen display over TV.
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'
# Kodi action ids (see xbmcgui action constants).
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15
ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107
# Key codes.
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
KEY_ESC = 61467
# Number of channel rows shown per EPG page.
CHANNELS_PER_PAGE = 8
HALF_HOUR = datetime.timedelta(minutes=30)
# Skin selected in the addon settings.
SKIN = ADDON.getSetting('skin')
def debug(s):
    """Log *s* to Kodi's debug log when the module-wide DEBUG flag is on."""
    if not DEBUG:
        return
    xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
    """Mutable 2D point used to track the EPG focus position (pixels)."""

    def __init__(self):
        # Both coordinates start at the origin.
        self.x = 0
        self.y = 0

    def __repr__(self):
        return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
    """Geometry of the visible EPG grid area, in pixels."""

    def __init__(self):
        # Edges of the grid rectangle.
        self.top = 0
        self.left = 0
        self.right = 0
        self.bottom = 0
        # Total width and per-channel row height.
        self.width = 0
        self.cellHeight = 0
class ControlAndProgram(object):
    """Pairs a GUI control with the Program it displays in the grid."""

    def __init__(self, control, program):
        self.program = program
        self.control = control
class TVGuide(xbmcgui.WindowXML):
C_MAIN_DATE_LONG = 3999
C_MAIN_DATE = 4000
C_MAIN_TITLE = 4020
C_MAIN_TIME = 4021
C_MAIN_DESCRIPTION = 4022
C_MAIN_IMAGE = 4023
C_MAIN_LOGO = 4024
C_MAIN_TIMEBAR = 4100
C_MAIN_LOADING = 4200
C_MAIN_LOADING_PROGRESS = 4201
C_MAIN_LOADING_TIME_LEFT = 4202
C_MAIN_LOADING_CANCEL = 4203
C_MAIN_MOUSE_CONTROLS = 4300
C_MAIN_MOUSE_HOME = 4301
C_MAIN_MOUSE_LEFT = 4302
C_MAIN_MOUSE_UP = 4303
C_MAIN_MOUSE_DOWN = 4304
C_MAIN_MOUSE_RIGHT = 4305
C_MAIN_MOUSE_EXIT = 4306
C_MAIN_BACKGROUND = 4600
C_MAIN_EPG = 5000
C_MAIN_EPG_VIEW_MARKER = 5001
C_MAIN_OSD = 6000
C_MAIN_OSD_TITLE = 6001
C_MAIN_OSD_TIME = 6002
C_MAIN_OSD_DESCRIPTION = 6003
C_MAIN_OSD_CHANNEL_LOGO = 6004
C_MAIN_OSD_CHANNEL_TITLE = 6005
    def __new__(cls):
        # WindowXML is constructed with (xml file, addon path, skin).
        return super(TVGuide, cls).__new__(cls, 'script-tvguide-main.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self):
        """Initialize window state; the database is created lazily in onInit()."""
        super(TVGuide, self).__init__()
        # Reminder scheduler, created once the source is initialized.
        self.notification = None
        # True while onRedrawEPG is running, to avoid re-entrancy.
        self.redrawingEPG = False
        self.isClosing = False
        # (control, program) pairs currently shown in the grid.
        self.controlAndProgramList = list()
        # Control ids getControl() may legitimately fail to find.
        self.ignoreMissingControlIds = list()
        # Index of the first visible channel row.
        self.channelIdx = 0
        self.focusPoint = Point()
        self.epgView = EPGView()
        self.streamingService = streaming.StreamsService(ADDON)
        self.player = xbmc.Player()
        self.database = None
        self.mode = MODE_EPG
        self.currentChannel = None
        # OSD is only available when enabled and alternative playback is off.
        self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting(
            'alternative.playback') != 'true'
        self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
        self.osdChannel = None
        self.osdProgram = None
        # Align the view start to the previous half-hour boundary.
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
def getControl(self, controlId):
try:
return super(TVGuide, self).getControl(controlId)
except:
if controlId in self.ignoreMissingControlIds:
return None
if not self.isClosing:
self.close()
return None
    def close(self):
        """Stop playback and close the window, shutting down the database first if open."""
        if not self.isClosing:
            self.isClosing = True
            if self.player.isPlaying():
                self.player.stop()
            if self.database:
                # The database closes asynchronously and then closes the window.
                self.database.close(super(TVGuide, self).close)
            else:
                super(TVGuide, self).close()
    def onInit(self):
        """Window initialization: measure the grid, create/initialize the database,
        and start the timebar updater."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
        self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)
        # The invisible marker control defines the geometry of the EPG grid.
        control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
        if control:
            left, top = control.getPosition()
            self.focusPoint.x = left
            self.focusPoint.y = top
            self.epgView.left = left
            self.epgView.top = top
            self.epgView.right = left + control.getWidth()
            self.epgView.bottom = top + control.getHeight()
            self.epgView.width = control.getWidth()
            self.epgView.cellHeight = control.getHeight() / CHANNELS_PER_PAGE
        if self.database:
            # Window re-opened with an existing database: just redraw.
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        else:
            try:
                self.database = src.Database()
            except src.SourceNotConfiguredException:
                self.onSourceNotConfigured()
                self.close()
                return
            self.database.initialize(self.onSourceInitialized, self.isSourceInitializationCancelled)
        self.updateTimebar()
def onAction(self, action):
debug('Mode is: %s' % self.mode)
if self.mode == MODE_TV:
self.onActionTVMode(action)
elif self.mode == MODE_OSD:
self.onActionOSDMode(action)
elif self.mode == MODE_EPG:
self.onActionEPGMode(action)
    def onActionTVMode(self, action):
        """Handle actions while watching fullscreen TV."""
        if action.getId() == ACTION_PAGE_UP:
            self._channelUp()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
        elif not self.osdEnabled:
            pass  # without the OSD, back/info actions are ignored in TV mode
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SHOW_INFO:
            self._showOsd()
    def onActionOSDMode(self, action):
        """Handle actions while the on-screen display is visible.

        Up/down browse channels, left/right browse programs on the OSD
        channel, select plays the OSD channel.
        """
        if action.getId() == ACTION_SHOW_INFO:
            self._hideOsd()
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            self._hideOsd()
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SELECT_ITEM:
            # Switch to the channel currently browsed in the OSD.
            if self.playChannel(self.osdChannel):
                self._hideOsd()
        elif action.getId() == ACTION_PAGE_UP:
            self._channelUp()
            self._showOsd()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
            self._showOsd()
        elif action.getId() == ACTION_UP:
            self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_DOWN:
            self.osdChannel = self.database.getNextChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_LEFT:
            previousProgram = self.database.getPreviousProgram(self.osdProgram)
            if previousProgram:
                self.osdProgram = previousProgram
                self._showOsd()
        elif action.getId() == ACTION_RIGHT:
            nextProgram = self.database.getNextProgram(self.osdProgram)
            if nextProgram:
                self.osdProgram = nextProgram
                self._showOsd()
    def onActionEPGMode(self, action):
        """Handle actions while the EPG grid is shown.

        First the exit/mouse cases are handled; then the current focus point
        is determined (recovering focus if it was lost); finally navigation
        and context-menu actions are dispatched.
        """
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        elif action.getId() == ACTION_PREVIOUS_MENU and action.getButtonCode() == KEY_ESC:
            self.close()
            return
        elif action.getId() == ACTION_MOUSE_MOVE:
            self._showControl(self.C_MAIN_MOUSE_CONTROLS)
            return
        elif action.getId() == KEY_CONTEXT_MENU:
            # Note: no return here -- the context menu itself is opened further
            # down once the focused program is known.
            if self.player.isPlaying():
                self._hideEpg()
        controlInFocus = None
        currentFocus = self.focusPoint
        try:
            controlInFocus = self.getFocus()
            if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
                # Use the centre of the focused program cell as reference point.
                (left, top) = controlInFocus.getPosition()
                currentFocus = Point()
                currentFocus.x = left + (controlInFocus.getWidth() / 2)
                currentFocus.y = top + (controlInFocus.getHeight() / 2)
        except Exception:
            # Focus was lost (e.g. control removed); restore it and stop here.
            control = self._findControlAt(self.focusPoint)
            if control is None and len(self.controlAndProgramList) > 0:
                control = self.controlAndProgramList[0].control
            if control is not None:
                self.setFocus(control)
            return
        if action.getId() == ACTION_LEFT:
            self._left(currentFocus)
        elif action.getId() == ACTION_RIGHT:
            self._right(currentFocus)
        elif action.getId() == ACTION_UP:
            self._up(currentFocus)
        elif action.getId() == ACTION_DOWN:
            self._down(currentFocus)
        elif action.getId() == ACTION_NEXT_ITEM:
            self._nextDay()
        elif action.getId() == ACTION_PREV_ITEM:
            self._previousDay()
        elif action.getId() == ACTION_PAGE_UP:
            self._moveUp(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_PAGE_DOWN:
            self._moveDown(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_MOUSE_WHEEL_UP:
            self._moveUp(scrollEvent=True)
        elif action.getId() == ACTION_MOUSE_WHEEL_DOWN:
            self._moveDown(scrollEvent=True)
        elif action.getId() == KEY_HOME:
            # Jump back to "now", aligned to the previous half-hour boundary.
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                     seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() in [KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU] and controlInFocus is not None:
            program = self._getProgramFromControl(controlInFocus)
            if program is not None:
                self._showContextMenu(program)
        else:
            xbmc.log('[script.ftvguide] Unhandled ActionId: ' + str(action.getId()), xbmc.LOGDEBUG)
    def onClick(self, controlId):
        """Handle clicks: mouse navigation buttons, or a program cell.

        Clicking a program tries to play its channel; when no stream is
        configured, stream auto-detection is attempted and the context
        menu or an addon chooser is shown as fallback.
        """
        if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
            self.close()
            return
        if self.isClosing:
            return
        if controlId == self.C_MAIN_MOUSE_HOME:
            # Jump back to "now", aligned to the previous half-hour boundary.
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30, seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_LEFT:
            self.viewStartDate -= datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_UP:
            self._moveUp(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_DOWN:
            self._moveDown(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_RIGHT:
            self.viewStartDate += datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        program = self._getProgramFromControl(self.getControl(controlId))
        if program is None:
            return
        if not self.playChannel(program.channel):
            # No stream configured; try to auto-detect one.
            result = self.streamingService.detectStream(program.channel)
            if not result:
                # Nothing detected; let the user configure via the context menu.
                self._showContextMenu(program)
            elif type(result) == str:
                # One stream detected -- remember and play it.
                self.database.setCustomStreamUrl(program.channel, result)
                self.playChannel(program.channel)
            else:
                # Several candidates detected -- let the user choose.
                d = ChooseStreamAddonDialog(result)
                d.doModal()
                if d.stream is not None:
                    self.database.setCustomStreamUrl(program.channel, d.stream)
                    self.playChannel(program.channel)
    def _showContextMenu(self, program):
        """Open the popup menu for *program* and act on the chosen button."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
        d = PopupMenu(self.database, program, not program.notificationScheduled)
        d.doModal()
        buttonClicked = d.buttonClicked
        del d
        if buttonClicked == PopupMenu.C_POPUP_REMIND:
            # Toggle the reminder for this program and redraw to update markers.
            if program.notificationScheduled:
                self.notification.removeNotification(program)
            else:
                self.notification.addNotification(program)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
            d = StreamSetupDialog(self.database, program.channel)
            d.doModal()
            del d
        elif buttonClicked == PopupMenu.C_POPUP_PLAY:
            self.playChannel(program.channel)
        elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
            d = ChannelsMenu(self.database)
            d.doModal()
            del d
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_QUIT:
            self.close()
        elif buttonClicked == PopupMenu.C_POPUP_LIBMOV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://movies/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_LIBTV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://tvshows/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_VIDEOADDONS:
            xbmc.executebuiltin('ActivateWindow(Videos,addons://sources/video/)')
def setFocusId(self, controlId):
control = self.getControl(controlId)
if control:
self.setFocus(control)
    def setFocus(self, control):
        """Focus *control*; for program cells also update the remembered focus point."""
        debug('setFocus %d' % control.getId())
        if control in [elem.control for elem in self.controlAndProgramList]:
            debug('Focus before %s' % self.focusPoint)
            (left, top) = control.getPosition()
            # Only move the remembered x when the cell does not cover it,
            # so vertical navigation keeps the column stable.
            if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
                self.focusPoint.x = left
            self.focusPoint.y = top + (control.getHeight() / 2)
            debug('New focus at %s' % self.focusPoint)
        super(TVGuide, self).setFocus(control)
    def onFocus(self, controlId):
        """Update the program info pane when a program cell gains focus."""
        try:
            controlInFocus = self.getControl(controlId)
        except Exception:
            return
        program = self._getProgramFromControl(controlInFocus)
        if program is None:
            return
        self.setControlLabel(self.C_MAIN_TITLE, '[B]%s[/B]' % program.title)
        if program.startDate or program.endDate:
            self.setControlLabel(self.C_MAIN_TIME,
                                 '[B]%s - %s[/B]' % (self.formatTime(program.startDate), self.formatTime(program.endDate)))
        else:
            self.setControlLabel(self.C_MAIN_TIME, '')
        if program.description:
            description = program.description
        else:
            description = strings(NO_DESCRIPTION)
        self.setControlText(self.C_MAIN_DESCRIPTION, description)
        if program.channel.logo is not None:
            self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
        else:
            self.setControlImage(self.C_MAIN_LOGO, '')
        if program.imageSmall is not None:
            self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
        else:
            # Fallback artwork when the program has no image.
            self.setControlImage(self.C_MAIN_IMAGE, 'tvguide-logo-epg.png')
        if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
            self.setControlBackground := None  # (unreachable placeholder removed)
        if not self.osdEnabled and self.player.isPlaying():
            self.player.stop()
def _left(self, currentFocus):
control = self._findControlOnLeft(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate -= datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.right
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)
def _right(self, currentFocus):
control = self._findControlOnRight(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate += datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.left
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)
def _up(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlAbove(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlAbove)
def _down(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlBelow(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlBelow)
def _nextDay(self):
self.viewStartDate += datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _previousDay(self):
self.viewStartDate -= datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _moveUp(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction=self._findControlAbove)
def _moveDown(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)
def _channelUp(self):
channel = self.database.getNextChannel(self.currentChannel)
self.playChannel(channel)
def _channelDown(self):
channel = self.database.getPreviousChannel(self.currentChannel)
self.playChannel(channel)
    def playChannel(self, channel):
        """Start playback of `channel` and return True when a stream URL was found.

        Plugin URLs are handed to Kodi builtins (RunPlugin / PlayMedia,
        depending on the alternativePlayback / osdEnabled settings); anything
        else goes straight to the player. A background timer is started to
        detect when playback stops.
        """
        self.currentChannel = channel
        wasPlaying = self.player.isPlaying()
        url = self.database.getStreamUrl(channel)
        if url:
            if url[0:9] == 'plugin://':
                if self.alternativePlayback:
                    # Let the plugin resolve and start playback itself.
                    xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
                elif self.osdEnabled:
                    # PlayMedia(...,1) keeps playback windowed so the OSD works.
                    xbmc.executebuiltin('PlayMedia(%s,1)' % url)
                else:
                    xbmc.executebuiltin('PlayMedia(%s)' % url)
            else:
                self.player.play(item=url, windowed=self.osdEnabled)
            if not wasPlaying:
                # First playback: drop the EPG grid so video is visible.
                self._hideEpg()
            # Poll for playback end from a background thread (builtins above
            # return before playback has actually started).
            threading.Timer(1, self.waitForPlayBackStopped).start()
            self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
        return url is not None
def waitForPlayBackStopped(self):
for retry in range(0, 100):
time.sleep(0.1)
if self.player.isPlaying():
break
while self.player.isPlaying() and not xbmc.abortRequested and not self.isClosing:
time.sleep(0.5)
self.onPlayBackStopped()
    def _showOsd(self):
        """Populate and display the on-screen display for the playing channel.

        No-op when the OSD is disabled in settings. The OSD channel is only
        re-captured when we were not already in OSD mode, so repeated calls
        while the OSD is up keep showing the same channel.
        """
        if not self.osdEnabled:
            return
        if self.mode != MODE_OSD:
            self.osdChannel = self.currentChannel
        if self.osdProgram is not None:
            self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
            if self.osdProgram.startDate or self.osdProgram.endDate:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '[B]%s - %s[/B]' % (
                    self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate)))
            else:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '')
            self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
            self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
            if self.osdProgram.channel.logo is not None:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, self.osdProgram.channel.logo)
            else:
                # No logo available; clear any previous channel's logo.
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')
        self.mode = MODE_OSD
        self._showControl(self.C_MAIN_OSD)
def _hideOsd(self):
self.mode = MODE_TV
self._hideControl(self.C_MAIN_OSD)
def _hideEpg(self):
self._hideControl(self.C_MAIN_EPG)
self.mode = MODE_TV
self._clearEpg()
    def onRedrawEPG(self, channelStart, startTime, focusFunction=None):
        """Rebuild the EPG grid for the page starting at `channelStart`/`startTime`.

        Shows a loading overlay, fetches the page from the database, lays out
        one ControlButton per program (plus a full-width placeholder for
        channels without data), adds them to the window and restores focus via
        `focusFunction` (default: exact hit-test at the remembered focus point).

        Re-entrancy guard: bails out while a redraw or a database update is
        already in progress, or while the window is closing.
        """
        if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
            debug('onRedrawEPG - already redrawing')
            return
        debug('onRedrawEPG')
        self.redrawingEPG = True
        self.mode = MODE_EPG
        self._showControl(self.C_MAIN_EPG)
        # One-shot timebar update; the recurring timer is managed elsewhere.
        self.updateTimebar(scheduleTimer=False)
        # Loading overlay with a cancel button while the page is fetched.
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
        self._showControl(self.C_MAIN_LOADING)
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)
        self._clearEpg()
        try:
            self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, self.onSourceProgressUpdate, clearExistingProgramList=False)
        except src.SourceException:
            self.onEPGLoadError()
            return
        # Channels still in this list after the program loop get a placeholder.
        channelsWithoutPrograms = list(channels)
        self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate, False))
        self.setControlLabel(self.C_MAIN_DATE_LONG, self.formatDate(self.viewStartDate, True))
        # Column headers: four half-hour slots across the two-hour page.
        for col in range(1, 5):
            self.setControlLabel(4000 + col, self.formatTime(startTime))
            startTime += HALF_HOUR
        if programs is None:
            self.onEPGLoadError()
            return
        showLogo = ADDON.getSetting('logos.enabled') == 'true'
        # Left-hand column: channel logo (411x ids) and title (401x ids).
        for idx in range(0, CHANNELS_PER_PAGE):
            if idx >= len(channels):
                # Blank out unused rows on a short last page.
                self.setControlImage(4110 + idx, ' ')
                self.setControlLabel(4010 + idx, ' ')
            else:
                channel = channels[idx]
                self.setControlLabel(4010 + idx, channel.title)
                if (channel.logo is not None and showLogo == True):
                    self.setControlImage(4110 + idx, channel.logo)
                else:
                    self.setControlImage(4110 + idx, ' ')
        # One button per program, clipped to the visible two-hour window.
        for program in programs:
            idx = channels.index(program.channel)
            if program.channel in channelsWithoutPrograms:
                channelsWithoutPrograms.remove(program.channel)
            startDelta = program.startDate - self.viewStartDate
            stopDelta = program.endDate - self.viewStartDate
            cellStart = self._secondsToXposition(startDelta.seconds)
            if startDelta.days < 0:
                # Program began before the window; clamp to the left edge.
                cellStart = self.epgView.left
            cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
            if cellStart + cellWidth > self.epgView.right:
                # Program runs past the window; clamp to the right edge.
                cellWidth = self.epgView.right - cellStart
            if cellWidth > 1:
                # Red texture marks programs with a reminder scheduled.
                if program.notificationScheduled:
                    noFocusTexture = 'tvguide-program-red.png'
                    focusTexture = 'tvguide-program-red-focus.png'
                else:
                    noFocusTexture = 'tvguide-program-grey.png'
                    focusTexture = 'tvguide-program-grey-focus.png'
                if cellWidth < 25:
                    # Too narrow for readable text.
                    title = ''
                else:
                    title = program.title
                control = xbmcgui.ControlButton(
                    cellStart,
                    self.epgView.top + self.epgView.cellHeight * idx,
                    cellWidth - 2,
                    self.epgView.cellHeight - 2,
                    title,
                    noFocusTexture=noFocusTexture,
                    focusTexture=focusTexture
                )
                self.controlAndProgramList.append(ControlAndProgram(control, program))
        # Full-width "no program available" placeholder rows.
        for channel in channelsWithoutPrograms:
            idx = channels.index(channel)
            control = xbmcgui.ControlButton(
                self.epgView.left,
                self.epgView.top + self.epgView.cellHeight * idx,
                (self.epgView.right - self.epgView.left) - 2,
                self.epgView.cellHeight - 2,
                strings(NO_PROGRAM_AVAILABLE),
                noFocusTexture='tvguide-program-grey.png',
                focusTexture='tvguide-program-grey-focus.png'
            )
            program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), None, None, None)
            self.controlAndProgramList.append(ControlAndProgram(control, program))
        # Restore focus near where the user last was.
        if focusFunction is None:
            focusFunction = self._findControlAt
        focusControl = focusFunction(self.focusPoint)
        controls = [elem.control for elem in self.controlAndProgramList]
        self.addControls(controls)
        if focusControl is not None:
            debug('onRedrawEPG - setFocus %d' % focusControl.getId())
            self.setFocus(focusControl)
        self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
        if focusControl is None and len(self.controlAndProgramList) > 0:
            # Nothing at the focus point; fall back to the first button.
            self.setFocus(self.controlAndProgramList[0].control)
        self._hideControl(self.C_MAIN_LOADING)
        self.redrawingEPG = False
def _clearEpg(self):
controls = [elem.control for elem in self.controlAndProgramList]
try:
self.removeControls(controls)
except RuntimeError:
for elem in self.controlAndProgramList:
try:
self.removeControl(elem.control)
except RuntimeError:
pass
del self.controlAndProgramList[:]
def onEPGLoadError(self):
self.redrawingEPG = False
self._hideControl(self.C_MAIN_LOADING)
xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2))
self.close()
def onSourceNotConfigured(self):
self.redrawingEPG = False
self._hideControl(self.C_MAIN_LOADING)
xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
self.close()
def isSourceInitializationCancelled(self):
return xbmc.abortRequested or self.isClosing
def onSourceInitialized(self, success):
if success:
self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
self.onRedrawEPG(0, self.viewStartDate)
    def onSourceProgressUpdate(self, percentageComplete):
        """Progress callback for the database import.

        Updates the progress bar and, once past 20%, a linear estimate of the
        remaining time based on elapsed time per percent. Returns False to ask
        the source to abort when Kodi is exiting or the window is closing.
        """
        control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
        if percentageComplete < 1:
            # (Re)start of an import: reset the bar and the timing baseline.
            if control:
                control.setPercent(1)
            self.progressStartTime = datetime.datetime.now()
            self.progressPreviousPercentage = percentageComplete
        elif percentageComplete != self.progressPreviousPercentage:
            if control:
                control.setPercent(percentageComplete)
            self.progressPreviousPercentage = percentageComplete
            delta = datetime.datetime.now() - self.progressStartTime
            if percentageComplete < 20:
                # Too early for a stable estimate.
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
            else:
                # elapsed / done% * remaining% -> seconds left.
                secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
                if secondsLeft > 30:
                    # Round long estimates down to 10-second steps.
                    secondsLeft -= secondsLeft % 10
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)
        return not xbmc.abortRequested and not self.isClosing
def onPlayBackStopped(self):
if not self.player.isPlaying() and not self.isClosing:
self._hideControl(self.C_MAIN_OSD)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _secondsToXposition(self, seconds):
return self.epgView.left + (seconds * self.epgView.width / 7200)
def _findControlOnRight(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x < x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlOnLeft(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x > x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlBelow(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y < y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] > top):
nearestControl = control
return nearestControl
def _findControlAbove(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y > y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] < top):
nearestControl = control
return nearestControl
def _findControlAt(self, point):
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
bottom = top + control.getHeight()
right = left + control.getWidth()
if left <= point.x <= right and top <= point.y <= bottom:
return control
return None
def _getProgramFromControl(self, control):
for elem in self.controlAndProgramList:
if elem.control == control:
return elem.program
return None
    def _hideControl(self, *controlIds):
        """Hide every control in `controlIds`.

        NOTE(review): setVisible(True) looks inverted for a "hide" method, but
        the _showControl counterpart is inverted the same way, so this appears
        deliberate — presumably the skin inverts the visible condition for
        these controls. Confirm against the skin XML before "fixing".
        """
        for controlId in controlIds:
            control = self.getControl(controlId)
            if control:
                control.setVisible(True)
    def _showControl(self, *controlIds):
        """Show every control in `controlIds`.

        NOTE(review): setVisible(False) looks inverted for a "show" method; see
        _hideControl — the pair is consistently inverted, presumably because
        the skin inverts the visible condition. Confirm against the skin XML
        before changing.
        """
        for controlId in controlIds:
            control = self.getControl(controlId)
            if control:
                control.setVisible(False)
def formatTime(self, timestamp):
if timestamp:
format = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
return timestamp.strftime(format)
else:
return ''
def formatDate(self, timestamp, longdate=False):
if timestamp:
if longdate == True:
format = xbmc.getRegion('datelong')
else:
format = xbmc.getRegion('dateshort')
return timestamp.strftime(format)
else:
return ''
def setControlImage(self, controlId, image):
control = self.getControl(controlId)
if control:
control.setImage(image.encode('utf-8'))
def setControlLabel(self, controlId, label):
control = self.getControl(controlId)
if control and label:
control.setLabel(label)
def setControlText(self, controlId, text):
control = self.getControl(controlId)
if control:
control.setText(text)
def updateTimebar(self, scheduleTimer=True):
# move timebar to current time
timeDelta = datetime.datetime.today() - self.viewStartDate
control = self.getControl(self.C_MAIN_TIMEBAR)
if control:
(x, y) = control.getPosition()
try:
# Sometimes raises:
# exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
control.setVisible(timeDelta.days == 0)
except:
pass
control.setPosition(self._secondsToXposition(timeDelta.seconds), y)
if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
threading.Timer(1, self.updateTimebar).start()
class PopupMenu(xbmcgui.WindowXMLDialog):
    """Context menu shown for a program cell: play, choose stream, remind,
    manage channels, or quit. The chosen button id is left in
    `self.buttonClicked` for the caller to inspect after close()."""
    # Button control ids (script-tvguide-menu.xml).
    C_POPUP_PLAY = 4000
    C_POPUP_CHOOSE_STREAM = 4001
    C_POPUP_REMIND = 4002
    C_POPUP_CHANNELS = 4003
    C_POPUP_QUIT = 4004
    # Info control ids.
    C_POPUP_CHANNEL_LOGO = 4100
    C_POPUP_CHANNEL_TITLE = 4101
    C_POPUP_PROGRAM_TITLE = 4102
    # Library/addon shortcut ids.
    C_POPUP_LIBMOV = 80000
    C_POPUP_LIBTV = 80001
    C_POPUP_VIDEOADDONS = 80002
    def __new__(cls, database, program, showRemind):
        # WindowXMLDialog takes its arguments in __new__, not __init__.
        return super(PopupMenu, cls).__new__(cls, 'script-tvguide-menu.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, program, showRemind):
        """Remember the program this menu was opened for.

        Args:
            database: source database (used for custom stream lookups).
            program: the program the menu acts on.
            showRemind: True to offer "remind", False to offer "don't remind".
        """
        super(PopupMenu, self).__init__()
        self.database = database
        self.program = program
        self.showRemind = showRemind
        self.buttonClicked = None
    def onInit(self):
        """Populate the dialog once its controls exist."""
        playControl = self.getControl(self.C_POPUP_PLAY)
        remindControl = self.getControl(self.C_POPUP_REMIND)
        channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
        channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
        programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)
        playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
        if not self.program.channel.isPlayable():
            # No stream known; push focus to "choose stream" instead.
            playControl.setEnabled(False)
            self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
        if self.database.getCustomStreamUrl(self.program.channel):
            # A custom stream exists; the button removes it instead.
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(REMOVE_STRM_FILE))
        if self.program.channel.logo is not None:
            channelLogoControl.setImage(self.program.channel.logo)
            channelTitleControl.setVisible(False)
        else:
            channelTitleControl.setLabel(self.program.channel.title)
            channelLogoControl.setVisible(False)
        programTitleControl.setLabel(self.program.title)
        if self.program.startDate:
            remindControl.setEnabled(True)
            if self.showRemind:
                remindControl.setLabel(strings(REMIND_PROGRAM))
            else:
                remindControl.setLabel(strings(DONT_REMIND_PROGRAM))
        else:
            # No start time -> nothing to remind about.
            remindControl.setEnabled(False)
    def onAction(self, action):
        """Close on back/menu actions."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
    def onClick(self, controlId):
        """Handle a button press; most presses just record the id and close."""
        if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
            # Toggle: remove the existing custom stream in-place.
            self.database.deleteCustomStreamUrl(self.program.channel)
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(CHOOSE_STRM_FILE))
            if not self.program.channel.isPlayable():
                playControl = self.getControl(self.C_POPUP_PLAY)
                playControl.setEnabled(False)
        else:
            self.buttonClicked = controlId
            self.close()
    def onFocus(self, controlId):
        """No focus handling needed for this dialog."""
        pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
    """Channel manager dialog: toggle channel visibility with a click, and
    reorder channels by "picking up" an entry (back/left), moving it with
    up/down, and dropping it again. Changes persist via saveChannelList()."""
    # Control ids (script-tvguide-channels.xml).
    C_CHANNELS_LIST = 6000
    C_CHANNELS_SELECTION_VISIBLE = 6001
    C_CHANNELS_SELECTION = 6002
    C_CHANNELS_SAVE = 6003
    C_CHANNELS_CANCEL = 6004
    def __new__(cls, database):
        # WindowXMLDialog takes its arguments in __new__, not __init__.
        return super(ChannelsMenu, cls).__new__(cls, 'script-tvguide-channels.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database):
        """Load the full (including hidden) channel list for editing."""
        super(ChannelsMenu, self).__init__()
        self.database = database
        self.channelList = database.getChannelList(onlyVisible=False)
        self.swapInProgress = False
        self.selectedChannel = 0
    def onInit(self):
        """Fill the list control and focus it."""
        self.updateChannelList()
        self.setFocusId(self.C_CHANNELS_LIST)
    def onAction(self, action):
        """Implement pick-up / move / drop reordering via focus switching."""
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU, ACTION_LEFT]:
            # Pick up the selected channel: show it on the floating button.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.selectedChannel = idx
            buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
            buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
            self.setFocusId(self.C_CHANNELS_SELECTION)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
            # Drop without moving.
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU]:
            # Drop onto the current position: swap picked-up and current.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.swapChannels(self.selectedChannel, idx)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
            # Nudge the carried channel one position up.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx > 0:
                self.swapChannels(idx, idx - 1)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
            # Nudge the carried channel one position down.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx < listControl.size() - 1:
                self.swapChannels(idx, idx + 1)
    def onClick(self, controlId):
        """List click toggles visibility; save/cancel close the dialog."""
        if controlId == self.C_CHANNELS_LIST:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            item = listControl.getSelectedItem()
            channel = self.channelList[int(item.getProperty('idx'))]
            channel.visible = not channel.visible
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item.setIconImage(iconImage)
        elif controlId == self.C_CHANNELS_SAVE:
            self.database.saveChannelList(self.close, self.channelList)
        elif controlId == self.C_CHANNELS_CANCEL:
            self.close()
    def onFocus(self, controlId):
        """No focus handling needed for this dialog."""
        pass
    def updateChannelList(self):
        """Rebuild the list control from self.channelList."""
        listControl = self.getControl(self.C_CHANNELS_LIST)
        listControl.reset()
        for idx, channel in enumerate(self.channelList):
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item = xbmcgui.ListItem('%3d. %s' % (idx + 1, channel.title), iconImage=iconImage)
            # Remember the channelList index on the item for click handling.
            item.setProperty('idx', str(idx))
            listControl.addItem(item)
    def updateListItem(self, idx, item):
        """Refresh one list item (label, icon, idx property) after a swap."""
        channel = self.channelList[idx]
        item.setLabel('%3d. %s' % (idx + 1, channel.title))
        if channel.visible:
            iconImage = 'tvguide-channel-visible.png'
        else:
            iconImage = 'tvguide-channel-hidden.png'
        item.setIconImage(iconImage)
        item.setProperty('idx', str(idx))
    def swapChannels(self, fromIdx, toIdx):
        """Swap two channels in the model and mirror the change in the UI.

        Guarded by `swapInProgress` so rapid key repeats cannot interleave.
        """
        if self.swapInProgress:
            return
        self.swapInProgress = True
        c = self.channelList[fromIdx]
        self.channelList[fromIdx] = self.channelList[toIdx]
        self.channelList[toIdx] = c
        # recalculate weight
        for idx, channel in enumerate(self.channelList):
            channel.weight = idx
        listControl = self.getControl(self.C_CHANNELS_LIST)
        self.updateListItem(fromIdx, listControl.getListItem(fromIdx))
        self.updateListItem(toIdx, listControl.getListItem(toIdx))
        listControl.selectItem(toIdx)
        xbmc.sleep(50)
        self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
    """Dialog for binding a custom stream to a channel, via one of three tabs:
    a .strm file browser, Kodi favourites, or streams offered by installed
    addons. A hidden label (C_STREAM_VISIBILITY_MARKER) tracks which tab is
    active so preview/OK know which list to read."""
    # Tab buttons.
    C_STREAM_STRM_TAB = 101
    C_STREAM_FAVOURITES_TAB = 102
    C_STREAM_ADDONS_TAB = 103
    # .strm tab controls.
    C_STREAM_STRM_BROWSE = 1001
    C_STREAM_STRM_FILE_LABEL = 1005
    C_STREAM_STRM_PREVIEW = 1002
    C_STREAM_STRM_OK = 1003
    C_STREAM_STRM_CANCEL = 1004
    # Favourites tab controls.
    C_STREAM_FAVOURITES = 2001
    C_STREAM_FAVOURITES_PREVIEW = 2002
    C_STREAM_FAVOURITES_OK = 2003
    C_STREAM_FAVOURITES_CANCEL = 2004
    # Addons tab controls.
    C_STREAM_ADDONS = 3001
    C_STREAM_ADDONS_STREAMS = 3002
    C_STREAM_ADDONS_NAME = 3003
    C_STREAM_ADDONS_DESCRIPTION = 3004
    C_STREAM_ADDONS_PREVIEW = 3005
    C_STREAM_ADDONS_OK = 3006
    C_STREAM_ADDONS_CANCEL = 3007
    # Hidden label holding the active tab name.
    C_STREAM_VISIBILITY_MARKER = 100
    VISIBLE_STRM = 'strm'
    VISIBLE_FAVOURITES = 'favourites'
    VISIBLE_ADDONS = 'addons'
    def __new__(cls, database, channel):
        # WindowXMLDialog takes its arguments in __new__, not __init__.
        return super(StreamSetupDialog, cls).__new__(cls, 'script-tvguide-streamsetup.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, channel):
        """Prepare state for configuring a stream for `channel`."""
        super(StreamSetupDialog, self).__init__()
        self.database = database
        self.channel = channel
        self.player = xbmc.Player()
        self.previousAddonId = None
        self.strmFile = None
        self.streamingService = streaming.StreamsService(ADDON)
    def close(self):
        """Stop any running preview before closing the dialog."""
        if self.player.isPlaying():
            self.player.stop()
        super(StreamSetupDialog, self).close()
    def onInit(self):
        """Populate the favourites and addon lists once controls exist."""
        self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        favourites = self.streamingService.loadFavourites()
        items = list()
        for label, value in favourites:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', value)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
        listControl.addItems(items)
        items = list()
        for id in self.streamingService.getAddons():
            try:
                addon = xbmcaddon.Addon(id)  # raises Exception if addon is not installed
                item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
                item.setProperty('addon_id', id)
                items.append(item)
            except Exception:
                # Skip addons that are known to the service but not installed.
                pass
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
        listControl.addItems(items)
        self.updateAddonInfo()
    def onAction(self, action):
        """Close on back/menu; refresh addon details when scrolling the addon list."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
        elif self.getFocusId() == self.C_STREAM_ADDONS:
            self.updateAddonInfo()
    def onClick(self, controlId):
        """Handle browse, OK, cancel and preview buttons for all three tabs."""
        if controlId == self.C_STREAM_STRM_BROWSE:
            stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', '.strm')
            if stream:
                self.database.setCustomStreamUrl(self.channel, stream)
                self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
                self.strmFile = stream
        elif controlId == self.C_STREAM_ADDONS_OK:
            listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_FAVOURITES_OK:
            listControl = self.getControl(self.C_STREAM_FAVOURITES)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_STRM_OK:
            self.database.setCustomStreamUrl(self.channel, self.strmFile)
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL]:
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW]:
            if self.player.isPlaying():
                # Preview button toggles: a second press stops the preview.
                self.player.stop()
                self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                return
            # Pick the stream from whichever tab is currently active.
            stream = None
            visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
            if visible == self.VISIBLE_ADDONS:
                listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_FAVOURITES:
                listControl = self.getControl(self.C_STREAM_FAVOURITES)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_STRM:
                stream = self.strmFile
            if stream is not None:
                self.player.play(item=stream, windowed=True)
                if self.player.isPlaying():
                    self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))
    def onFocus(self, controlId):
        """Track the active tab in the hidden visibility marker label."""
        if controlId == self.C_STREAM_STRM_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        elif controlId == self.C_STREAM_FAVOURITES_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_FAVOURITES)
        elif controlId == self.C_STREAM_ADDONS_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_ADDONS)
    def updateAddonInfo(self):
        """Show name/description/streams for the addon selected in the list.

        Skips the (potentially slow) stream lookup when the selection has not
        changed since the last call.
        """
        listControl = self.getControl(self.C_STREAM_ADDONS)
        item = listControl.getSelectedItem()
        if item is None:
            return
        if item.getProperty('addon_id') == self.previousAddonId:
            return
        self.previousAddonId = item.getProperty('addon_id')
        addon = xbmcaddon.Addon(id=item.getProperty('addon_id'))
        self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
        self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
        streams = self.streamingService.getAddonStreams(item.getProperty('addon_id'))
        items = list()
        for (label, stream) in streams:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', stream)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
        listControl.reset()
        listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
    """Simple picker dialog: present one list entry per (addon, stream) and
    leave the chosen stream URL in `self.stream` after close()."""
    C_SELECTION_LIST = 1000
    def __new__(cls, addons):
        # WindowXMLDialog takes its arguments in __new__, not __init__.
        return super(ChooseStreamAddonDialog, cls).__new__(cls, 'script-tvguide-streamaddon.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, addons):
        """`addons` is an iterable of (addon_id, label, stream_url) tuples."""
        super(ChooseStreamAddonDialog, self).__init__()
        self.addons = addons
        self.stream = None
    def onInit(self):
        """Build the selection list from the provided addon streams."""
        items = list()
        for id, label, url in self.addons:
            addon = xbmcaddon.Addon(id)
            item = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
            item.setProperty('stream', url)
            items.append(item)
        listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
        listControl.addItems(items)
        self.setFocus(listControl)
    def onAction(self, action):
        """Close (with self.stream left as None) on back/menu."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK]:
            self.close()
    def onClick(self, controlId):
        """Record the selected stream URL and close."""
        if controlId == ChooseStreamAddonDialog.C_SELECTION_LIST:
            listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
            self.stream = listControl.getSelectedItem().getProperty('stream')
            self.close()
    def onFocus(self, controlId):
        """No focus handling needed for this dialog."""
        pass
| true | true |
f72fd4f07817e53144026d6614cb8968d8cb124e | 2,873 | py | Python | setup.py | rupanshi-chawda/nm-theme | d909d7f89d6b0bca49d6d90ed50d087bab41b912 | [
"BSD-3-Clause"
] | null | null | null | setup.py | rupanshi-chawda/nm-theme | d909d7f89d6b0bca49d6d90ed50d087bab41b912 | [
"BSD-3-Clause"
] | null | null | null | setup.py | rupanshi-chawda/nm-theme | d909d7f89d6b0bca49d6d90ed50d087bab41b912 | [
"BSD-3-Clause"
] | null | null | null | """
nm-theme setup
"""
import json
import sys
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
# Get the package info from package.json
pkg_json = json.loads((HERE / "package.json").read_bytes())
# The name of the project
name = "nm-theme"
lab_path = (HERE / pkg_json["jupyterlab"]["outputDir"])
# Representative files that should exist after a successful build
ensured_targets = [
str(lab_path / "package.json"),
str(lab_path / "static/style.js")
]
labext_name = pkg_json["name"]
data_files_spec = [
("share/jupyter/labextensions/%s" % labext_name, str(lab_path.relative_to(HERE)), "**"),
("share/jupyter/labextensions/%s" % labext_name, str("."), "install.json"),
]
long_description = (HERE / "README.md").read_text()
version = (
pkg_json["version"]
.replace("-alpha.", "a")
.replace("-beta.", "b")
.replace("-rc.", "rc")
)
setup_args = dict(
name=name,
version=version,
url=pkg_json["homepage"],
author=pkg_json["author"]["name"],
author_email=pkg_json["author"]["email"],
description=pkg_json["description"],
license=pkg_json["license"],
license_file="LICENSE",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=[],
zip_safe=False,
include_package_data=True,
python_requires=">=3.7",
platforms="Linux, Mac OS X, Windows",
keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Framework :: Jupyter",
"Framework :: Jupyter :: JupyterLab",
"Framework :: Jupyter :: JupyterLab :: 3",
"Framework :: Jupyter :: JupyterLab :: Extensions",
"Framework :: Jupyter :: JupyterLab :: Extensions :: Prebuilt",
],
)
try:
from jupyter_packaging import (
wrap_installers,
npm_builder,
get_data_files
)
post_develop = npm_builder(
build_cmd="install:extension", source_dir="src", build_dir=lab_path
)
setup_args["cmdclass"] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
setup_args["data_files"] = get_data_files(data_files_spec)
except ImportError as e:
import logging
logging.basicConfig(format="%(levelname)s: %(message)s")
logging.warning("Build tool `jupyter-packaging` is missing. Install it with pip or conda.")
if not ("--name" in sys.argv or "--version" in sys.argv):
raise e
if __name__ == "__main__":
setuptools.setup(**setup_args)
| 29.927083 | 104 | 0.655761 | import json
import sys
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
pkg_json = json.loads((HERE / "package.json").read_bytes())
name = "nm-theme"
lab_path = (HERE / pkg_json["jupyterlab"]["outputDir"])
ensured_targets = [
str(lab_path / "package.json"),
str(lab_path / "static/style.js")
]
labext_name = pkg_json["name"]
data_files_spec = [
("share/jupyter/labextensions/%s" % labext_name, str(lab_path.relative_to(HERE)), "**"),
("share/jupyter/labextensions/%s" % labext_name, str("."), "install.json"),
]
long_description = (HERE / "README.md").read_text()
version = (
pkg_json["version"]
.replace("-alpha.", "a")
.replace("-beta.", "b")
.replace("-rc.", "rc")
)
setup_args = dict(
name=name,
version=version,
url=pkg_json["homepage"],
author=pkg_json["author"]["name"],
author_email=pkg_json["author"]["email"],
description=pkg_json["description"],
license=pkg_json["license"],
license_file="LICENSE",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=[],
zip_safe=False,
include_package_data=True,
python_requires=">=3.7",
platforms="Linux, Mac OS X, Windows",
keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Framework :: Jupyter",
"Framework :: Jupyter :: JupyterLab",
"Framework :: Jupyter :: JupyterLab :: 3",
"Framework :: Jupyter :: JupyterLab :: Extensions",
"Framework :: Jupyter :: JupyterLab :: Extensions :: Prebuilt",
],
)
try:
from jupyter_packaging import (
wrap_installers,
npm_builder,
get_data_files
)
post_develop = npm_builder(
build_cmd="install:extension", source_dir="src", build_dir=lab_path
)
setup_args["cmdclass"] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
setup_args["data_files"] = get_data_files(data_files_spec)
except ImportError as e:
import logging
logging.basicConfig(format="%(levelname)s: %(message)s")
logging.warning("Build tool `jupyter-packaging` is missing. Install it with pip or conda.")
if not ("--name" in sys.argv or "--version" in sys.argv):
raise e
if __name__ == "__main__":
setuptools.setup(**setup_args)
| true | true |
f72fd53943b50711edaa6f2f5b0e426773997a03 | 8,917 | py | Python | service_clients/aws/s3_client.py | radzhome/python-service-clients | dd17e74217a9412b1b78c90433bfced08733fd88 | [
"BSD-2-Clause"
] | 2 | 2019-04-18T05:29:32.000Z | 2019-11-01T22:58:56.000Z | service_clients/aws/s3_client.py | radzhome/python-service-clients | dd17e74217a9412b1b78c90433bfced08733fd88 | [
"BSD-2-Clause"
] | null | null | null | service_clients/aws/s3_client.py | radzhome/python-service-clients | dd17e74217a9412b1b78c90433bfced08733fd88 | [
"BSD-2-Clause"
] | null | null | null | from __future__ import unicode_literals
"""
S3 bucket CRUD operations core module
"""
import logging
import time
import boto3
import botocore
from botocore.client import Config
class S3Client:  # pragma: no cover
    """S3 bucket CRUD operations.

    Encapsulates uploading, downloading and other S3 file operations,
    including error handling and a simple reconnect/retry scheme.
    Not covered by unit tests (external service); exercised by
    integration tests instead.
    """

    S3_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.000Z'  # Not used
    RECONNECT_SLEEP_SECS = 0.5
    CONN_RETRIES = 10
    # Fail fast: short connect timeout, boto-level retries disabled --
    # reconnects are handled by this class instead.
    CONN_CONFIG = Config(connect_timeout=5, retries={'max_attempts': 0})

    def __init__(self, config, reconnect_sleep_secs=RECONNECT_SLEEP_SECS, conn_retries=CONN_RETRIES):
        """
        Load config from passed params or override with defaults.

        :param config: dict with access_key_id, secret_access_key,
            aws_region and an optional bucket_name
        :param reconnect_sleep_secs: float, sleep between reconnect attempts
        :param conn_retries: int, max connection attempts before raising
        """
        self.config = config
        self.access_key_id = self.config['access_key_id']
        self.secret_access_key = self.config['secret_access_key']
        self.aws_region = self.config['aws_region']
        self.bucket_name = self.config.get('bucket_name')  # Optional bucket name
        self.RECONNECT_SLEEP_SECS = reconnect_sleep_secs
        self.CONN_RETRIES = conn_retries
        self.connection_attempt = 0
        self.connection = None
        self.bucket = None
        # Only resolve the default bucket object when a name was configured.
        self.connect(run_get_bucket=bool(self.bucket_name))

    def connect(self, run_get_bucket=False):
        """
        Create the S3 resource connection for the configured region.
        Swallows errors until the retry budget is exhausted, then re-raises.

        :param run_get_bucket: bool, also resolve the default bucket object
        :return: None
        """
        try:
            self.connection_attempt += 1
            self.connection = boto3.resource('s3', region_name=self.aws_region,
                                             aws_access_key_id=self.access_key_id,
                                             aws_secret_access_key=self.secret_access_key,
                                             config=self.CONN_CONFIG)
            if run_get_bucket:
                self.bucket = self._get_bucket()
        except Exception as e:
            logging.exception("S3Client.connect failed with params {}, error {}".format(self.config, e))
            if self.connection_attempt >= self.CONN_RETRIES:
                raise

    def _get_bucket(self, bucket_name=None):
        """
        Return the bucket object for the given (or default) bucket name.

        NOTE: boto3 may hand back a Bucket object even for a nonexistent
        bucket; NoSuchBucket only surfaces on first use (see list()).

        :param bucket_name: str, bucket name (optional)
        :return: s3.Bucket
        """
        try:
            bucket = self.connection.Bucket(name=bucket_name or self.bucket_name)
        except Exception as e:
            # I.e. gaierror: [Errno -2] Name or service not known
            logging.exception("S3Client.get_bucket unable to get bucket {}, error {}".format(self.bucket_name, e))
            raise
        return bucket

    def list(self, bucket_name=None):
        """
        List the contents of a bucket.

        :param bucket_name: str, bucket name (optional)
        :return: list of s3.ObjectSummary, or None when the bucket is missing
        """
        bucket = self._get_bucket(bucket_name) if bucket_name else self.bucket
        if not bucket:
            # Bug fix: log label previously said "S3Client.remove".
            logging.warning("S3Client.list bucket not found, {}".format(bucket_name or self.bucket_name))
            return None
        try:
            result = list(bucket.objects.all())
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchBucket":
                logging.warning("S3Client.list no such bucket {}".format(bucket_name or self.bucket_name))
                result = None
            else:
                raise
        return result

    def read(self, key, bucket_name=None):
        """
        Return the contents of a key in S3.

        :param key: str, bucket key filename
        :param bucket_name: str, bucket name (optional)
        :return: str contents (bytes when not utf-8 decodable),
            None when the key does not exist
        """
        try:
            obj = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name)
            contents = obj.get()['Body'].read()
            try:
                contents = contents.decode('utf-8')
            except UnicodeDecodeError:
                logging.debug("S3Client.read key cannot be decoded using utf-8, leaving raw. {}".format(key))
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchKey":
                logging.warning("S3Client.read no such key {}".format(key))
                contents = None
            else:
                raise
        except Exception as e:  # Retry in-case we have a connection error
            logging.exception("S3Client.read failed for key {}, error {}".format(key, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            # Bug fix: bucket_name was previously dropped on the retry.
            contents = self.read(key, bucket_name=bucket_name)
        return contents

    def write(self, key, contents, bucket_name=None):
        """
        Write contents to a key in S3.

        :param key: str, bucket key filename
        :param contents: str or bytes, contents to save to the key
        :param bucket_name: str, bucket name (optional)
        :return: dict with 'file_name' on success, None on failure
        """
        output = response = None
        try:
            response = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name).put(Body=contents)
            output = {
                'file_name': key,
            }
        except Exception as e:
            logging.exception("S3Client.write failed for key {}, error {}, response {}".format(key, e, response))
        return output

    def upload(self, key, origin_path, bucket_name=None):
        """
        Upload a local file to S3.

        :param key: str, bucket key filename
        :param origin_path: str, path to origin filename
        :param bucket_name: str, bucket name (optional)
        :return: bool, success
        """
        result = True
        try:
            # Bug fix: use a context manager so the file handle is always
            # closed (it previously leaked).
            with open(origin_path, 'rb') as file_body:
                self.connection.Bucket(bucket_name or self.bucket_name).put_object(Key=key, Body=file_body)
        except Exception as e:
            logging.exception("S3Client.upload failed for key {}, error {} ".format(key, e))
            result = False
        return result

    def download(self, key, destination, bucket_name=None):
        """
        Download a key from S3 to a local destination path.

        :param key: str, bucket key filename
        :param destination: str, path to local file name
        :param bucket_name: str, bucket name (optional)
        :return: bool, success
        """
        result = True
        bucket = self._get_bucket(bucket_name) if bucket_name else self.bucket
        if not bucket:
            # Bug fix: return early -- execution previously fell through and
            # crashed on bucket.download_file with bucket=None. Log label
            # previously said "S3Client.remove".
            logging.warning("S3Client.download bucket not found, {}".format(bucket_name or self.bucket_name))
            return False
        try:
            bucket.download_file(key, destination)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "404":
                logging.error("S3Client.download bucket missing key file {}".format(key))
                # Bug fix: a missing key is not a successful download.
                result = False
            else:
                raise
        except Exception as e:
            logging.warning("S3Client.download failed for key {} to {}, error {}, retrying".format(key, destination, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            # Bug fix: bucket_name was previously dropped on the retry.
            result = self.download(key, destination, bucket_name=bucket_name)
        return result

    def remove(self, keys, bucket_name=None):
        """
        Delete the given keys from the given (or default) bucket.

        :param keys: list, list of key names
        :param bucket_name: str, bucket name (optional)
        :return: bool, success
        """
        bucket = self._get_bucket(bucket_name) if bucket_name else self.bucket
        if not bucket:
            # Bug fix: return early -- execution previously fell through and
            # raised AttributeError on bucket.delete_objects with bucket=None.
            logging.warning("S3Client.remove bucket not found, {}".format(bucket_name or self.bucket_name))
            return False
        logging.warning("S3Client.remove deleting keys {}".format(keys))
        objects = [{'Key': key} for key in keys]
        bucket.delete_objects(Delete={'Objects': objects})
        return True
| 37.309623 | 120 | 0.596165 | from __future__ import unicode_literals
import logging
import time
import boto3
import botocore
from botocore.client import Config
class S3Client:
    """S3 bucket CRUD operations.

    Encapsulates uploading, downloading and other S3 file operations,
    including error handling and a simple reconnect/retry scheme.
    """

    S3_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.000Z'  # Not used
    RECONNECT_SLEEP_SECS = 0.5
    CONN_RETRIES = 10
    # Fail fast: short connect timeout, boto-level retries disabled --
    # reconnects are handled by this class instead.
    CONN_CONFIG = Config(connect_timeout=5, retries={'max_attempts': 0})

    def __init__(self, config, reconnect_sleep_secs=RECONNECT_SLEEP_SECS, conn_retries=CONN_RETRIES):
        """Load config from passed params or override with defaults.

        :param config: dict with access_key_id, secret_access_key,
            aws_region and an optional bucket_name
        :param reconnect_sleep_secs: float, sleep between reconnect attempts
        :param conn_retries: int, max connection attempts before raising
        """
        self.config = config
        self.access_key_id = self.config['access_key_id']
        self.secret_access_key = self.config['secret_access_key']
        self.aws_region = self.config['aws_region']
        self.bucket_name = self.config.get('bucket_name')  # Optional bucket name
        self.RECONNECT_SLEEP_SECS = reconnect_sleep_secs
        self.CONN_RETRIES = conn_retries
        self.connection_attempt = 0
        self.connection = None
        self.bucket = None
        # Only resolve the default bucket object when a name was configured.
        self.connect(run_get_bucket=bool(self.bucket_name))

    def connect(self, run_get_bucket=False):
        """Create the S3 resource connection for the configured region.

        :param run_get_bucket: bool, also resolve the default bucket object
        """
        try:
            self.connection_attempt += 1
            self.connection = boto3.resource('s3', region_name=self.aws_region,
                                             aws_access_key_id=self.access_key_id,
                                             aws_secret_access_key=self.secret_access_key,
                                             config=self.CONN_CONFIG)
            if run_get_bucket:
                self.bucket = self._get_bucket()
        except Exception as e:
            logging.exception("S3Client.connect failed with params {}, error {}".format(self.config, e))
            # Re-raise only once the retry budget is exhausted.
            if self.connection_attempt >= self.CONN_RETRIES:
                raise

    def _get_bucket(self, bucket_name=None):
        """Return the bucket object for the given (or default) bucket name.

        NOTE: boto3 may hand back a Bucket object even for a nonexistent
        bucket; NoSuchBucket only surfaces on first use (see list()).
        """
        try:
            bucket = self.connection.Bucket(name=bucket_name or self.bucket_name)
        except Exception as e:
            logging.exception("S3Client.get_bucket unable to get bucket {}, error {}".format(self.bucket_name, e))
            raise
        return bucket

    def list(self, bucket_name=None):
        """List the contents of a bucket.

        :return: list of s3.ObjectSummary, or None when the bucket is missing
        """
        bucket = self._get_bucket(bucket_name) if bucket_name else self.bucket
        if not bucket:
            # Bug fix: log label previously said "S3Client.remove".
            logging.warning("S3Client.list bucket not found, {}".format(bucket_name or self.bucket_name))
            return None
        try:
            result = list(bucket.objects.all())
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchBucket":
                logging.warning("S3Client.list no such bucket {}".format(bucket_name or self.bucket_name))
                result = None
            else:
                raise
        return result

    def read(self, key, bucket_name=None):
        """Return the contents of a key in S3.

        :return: str contents (bytes when not utf-8 decodable),
            None when the key does not exist
        """
        try:
            obj = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name)
            contents = obj.get()['Body'].read()
            try:
                contents = contents.decode('utf-8')
            except UnicodeDecodeError:
                logging.debug("S3Client.read key cannot be decoded using utf-8, leaving raw. {}".format(key))
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchKey":
                logging.warning("S3Client.read no such key {}".format(key))
                contents = None
            else:
                raise
        except Exception as e:
            # Retry in case of a connection error.
            logging.exception("S3Client.read failed for key {}, error {}".format(key, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            # Bug fix: bucket_name was previously dropped on the retry.
            contents = self.read(key, bucket_name=bucket_name)
        return contents

    def write(self, key, contents, bucket_name=None):
        """Write contents to a key in S3.

        :return: dict with 'file_name' on success, None on failure
        """
        output = response = None
        try:
            response = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name).put(Body=contents)
            output = {
                'file_name': key,
            }
        except Exception as e:
            logging.exception("S3Client.write failed for key {}, error {}, response {}".format(key, e, response))
        return output

    def upload(self, key, origin_path, bucket_name=None):
        """Upload a local file to S3.

        :return: bool, success
        """
        result = True
        try:
            # Bug fix: use a context manager so the file handle is always
            # closed (it previously leaked).
            with open(origin_path, 'rb') as file_body:
                self.connection.Bucket(bucket_name or self.bucket_name).put_object(Key=key, Body=file_body)
        except Exception as e:
            logging.exception("S3Client.upload failed for key {}, error {} ".format(key, e))
            result = False
        return result

    def download(self, key, destination, bucket_name=None):
        """Download a key from S3 to a local destination path.

        :return: bool, success
        """
        result = True
        bucket = self._get_bucket(bucket_name) if bucket_name else self.bucket
        if not bucket:
            # Bug fix: return early -- execution previously fell through and
            # crashed on bucket.download_file with bucket=None. Log label
            # previously said "S3Client.remove".
            logging.warning("S3Client.download bucket not found, {}".format(bucket_name or self.bucket_name))
            return False
        try:
            bucket.download_file(key, destination)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "404":
                logging.error("S3Client.download bucket missing key file {}".format(key))
                # Bug fix: a missing key is not a successful download.
                result = False
            else:
                raise
        except Exception as e:
            logging.warning("S3Client.download failed for key {} to {}, error {}, retrying".format(key, destination, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            # Bug fix: bucket_name was previously dropped on the retry.
            result = self.download(key, destination, bucket_name=bucket_name)
        return result

    def remove(self, keys, bucket_name=None):
        """Delete the given keys from the given (or default) bucket.

        :return: bool, success
        """
        bucket = self._get_bucket(bucket_name) if bucket_name else self.bucket
        if not bucket:
            # Bug fix: return early -- execution previously fell through and
            # raised AttributeError on bucket.delete_objects with bucket=None.
            logging.warning("S3Client.remove bucket not found, {}".format(bucket_name or self.bucket_name))
            return False
        logging.warning("S3Client.remove deleting keys {}".format(keys))
        objects = [{'Key': key} for key in keys]
        bucket.delete_objects(Delete={'Objects': objects})
        return True
| true | true |
f72fd5b91881f72c58b41d9a2321dc53142923f3 | 1,234 | py | Python | acmicpc/9093/9093-1.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 3 | 2019-03-09T05:19:23.000Z | 2019-04-06T09:26:36.000Z | acmicpc/9093/9093-1.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 1 | 2020-02-23T10:38:04.000Z | 2020-02-23T10:38:04.000Z | acmicpc/9093/9093-1.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 1 | 2019-05-22T13:47:53.000Z | 2019-05-22T13:47:53.000Z | # Stack 활용해서 풀기
N = int(input())
class Node(object):
    """Singly-linked-list node used as stack storage."""

    def __init__(self, value=None, next=None):
        self.value = value  # payload character
        self.next = next    # node beneath this one on the stack


class Stack(object):
    """LIFO stack backed by a singly linked list."""

    def __init__(self):
        self.head = None  # top-of-stack node
        self.count = 0    # number of stored items

    def is_empty(self):
        """Return True when the stack holds no items."""
        return not bool(self.head)

    def push(self, item):
        """Place item on top of the stack."""
        self.head = Node(item, self.head)
        self.count += 1

    def pop(self):
        """Remove and return the top item; print a notice when empty."""
        if self.count > 0:
            node = self.head
            self.head = node.next
            self.count -= 1
            return node.value
        else:
            print('Stack is empty')

    def peek(self):
        """Return the top item without removing it; print a notice when empty."""
        if self.count > 0:
            return self.head.value
        else:
            print('Stack is empty')

    def size(self):
        """Return the number of items on the stack.

        Bug fix: previously returned ``self.size`` (the bound method
        itself) instead of the item count.
        """
        return self.count
def reverse_with_stack(sentence):
    """Print the sentence with every whitespace-delimited word reversed.

    Characters are buffered on a stack until a space or newline is seen,
    then drained (LIFO) so the word comes out reversed; the separator
    itself is printed unchanged.
    """
    stack = Stack()
    for ch in sentence:
        if ch == ' ' or ch == '\n':
            # Flush the buffered word in reverse order.
            while not stack.is_empty():
                print(stack.pop(), end='')
            print(ch, end='')
        else:
            stack.push(ch)
# Process each of the N sentences. A trailing newline is appended so that
# reverse_with_stack flushes the final buffered word.
while N:
    sentence = input()
    sentence += '\n'
    reverse_with_stack(sentence)
    N-=1
| 21.649123 | 51 | 0.502431 |
N = int(input())
class Node(object):
    """Singly-linked-list node used as stack storage."""

    def __init__(self, value=None, next=None):
        self.value = value  # payload character
        self.next = next    # node beneath this one on the stack


class Stack(object):
    """LIFO stack backed by a singly linked list."""

    def __init__(self):
        self.head = None  # top-of-stack node
        self.count = 0    # number of stored items

    def is_empty(self):
        """Return True when the stack holds no items."""
        return not bool(self.head)

    def push(self, item):
        """Place item on top of the stack."""
        self.head = Node(item, self.head)
        self.count += 1

    def pop(self):
        """Remove and return the top item; print a notice when empty."""
        if self.count > 0:
            node = self.head
            self.head = node.next
            self.count -= 1
            return node.value
        else:
            print('Stack is empty')

    def peek(self):
        """Return the top item without removing it; print a notice when empty."""
        if self.count > 0:
            return self.head.value
        else:
            print('Stack is empty')

    def size(self):
        """Return the number of items on the stack.

        Bug fix: previously returned ``self.size`` (the bound method
        itself) instead of the item count.
        """
        return self.count
def reverse_with_stack(sentence):
    """Print the sentence with every whitespace-delimited word reversed.

    Characters are buffered on a stack until a space or newline is seen,
    then drained (LIFO) so the word comes out reversed; the separator
    itself is printed unchanged.
    """
    stack = Stack()
    for ch in sentence:
        if ch == ' ' or ch == '\n':
            # Flush the buffered word in reverse order.
            while not stack.is_empty():
                print(stack.pop(), end='')
            print(ch, end='')
        else:
            stack.push(ch)
# Process each of the N sentences. A trailing newline is appended so that
# reverse_with_stack flushes the final buffered word.
while N:
    sentence = input()
    sentence += '\n'
    reverse_with_stack(sentence)
    N-=1
| true | true |
f72fd5e243d2a0ee9ab66cb14a1e4f2f75b8f2b5 | 15,989 | py | Python | lib/surface/container/node_pools/create.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 2 | 2019-11-10T09:17:07.000Z | 2019-12-18T13:44:08.000Z | lib/surface/container/node_pools/create.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | null | null | null | lib/surface/container/node_pools/create.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 1 | 2020-07-25T01:40:19.000Z | 2020-07-25T01:40:19.000Z | # -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create node pool command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.compute import metadata_utils
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.api_lib.container import api_adapter
from googlecloudsdk.api_lib.container import util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.container import constants
from googlecloudsdk.command_lib.container import container_command_util as cmd_util
from googlecloudsdk.command_lib.container import flags
from googlecloudsdk.core import log
DETAILED_HELP = {
'DESCRIPTION':
"""\
*{command}* facilitates the creation of a node pool in a Google
Kubernetes Engine cluster. A variety of options exists to customize the
node configuration and the number of nodes created.
""",
'EXAMPLES':
"""\
To create a new node pool "node-pool-1" with the default options in the
cluster "sample-cluster", run:
$ {command} node-pool-1 --cluster=sample-cluster
The new node pool will show up in the cluster after all the nodes have
been provisioned.
To create a node pool with 5 nodes, run:
$ {command} node-pool-1 --cluster=sample-cluster --num-nodes=5
""",
}
WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE = (
'Windows SAC node pools must be upgraded regularly to remain operational. '
'Please refer to https://cloud.google.com/kubernetes-engine/docs/how-to/'
'creating-a-cluster-windows#choose_your_windows_server_node_image for more '
'information.')
def _Args(parser):
  """Register flags for this command.

  These are the flags shared by every release track (GA/beta/alpha);
  track-specific flags are added in each command class's Args method.

  Args:
    parser: An argparse.ArgumentParser-like object. It is mocked out in order to
      capture some information, but behaves like an ArgumentParser.
  """
  flags.AddNodePoolNameArg(parser, 'The name of the node pool to create.')
  flags.AddNodePoolClusterFlag(parser, 'The cluster to add the node pool to.')
  # Timeout in seconds for operation
  parser.add_argument(
      '--timeout',
      type=int,
      default=1800,
      hidden=True,
      help='THIS ARGUMENT NEEDS HELP TEXT.')
  parser.add_argument(
      '--num-nodes',
      type=int,
      help='The number of nodes in the node pool in each of the '
      'cluster\'s zones.',
      default=3)
  flags.AddMachineTypeFlag(parser)
  parser.add_argument(
      '--disk-size',
      type=arg_parsers.BinarySize(lower_bound='10GB'),
      help='Size for node VM boot disks in GB. Defaults to 100GB.')
  flags.AddImageTypeFlag(parser, 'node pool')
  flags.AddImageFlag(parser, hidden=True)
  flags.AddImageProjectFlag(parser, hidden=True)
  flags.AddImageFamilyFlag(parser, hidden=True)
  flags.AddNodeLabelsFlag(parser, for_node_pool=True)
  flags.AddTagsFlag(
      parser, """\
Applies the given Compute Engine tags (comma separated) on all nodes in the new
node-pool. Example:
  $ {command} node-pool-1 --cluster=example-cluster --tags=tag1,tag2
New nodes, including ones created by resize or recreate, will have these tags
on the Compute Engine API instance object and can be used in firewall rules.
See https://cloud.google.com/sdk/gcloud/reference/compute/firewall-rules/create
for examples.
""")
  # Table format used when printing the created node pool(s).
  parser.display_info.AddFormat(util.NODEPOOLS_FORMAT)
  flags.AddNodeVersionFlag(parser)
  flags.AddDiskTypeFlag(parser)
  flags.AddMetadataFlags(parser)
  flags.AddShieldedInstanceFlags(parser)
  flags.AddNetworkConfigFlags(parser)
  flags.AddThreadsPerCore(parser)
def ParseCreateNodePoolOptionsBase(args):
  """Parses the flags provided with the node pool creation command.

  Maps the flags shared by all release tracks onto a CreateNodePoolOptions
  message; track-specific options are layered on by each command class's
  ParseCreateNodePoolOptions.

  Args:
    args: an argparse namespace of the parsed command-line flags.

  Returns:
    api_adapter.CreateNodePoolOptions populated from the common flags.
  """
  enable_autorepair = cmd_util.GetAutoRepair(args)
  # Surface deprecation/impact warnings and validation errors before any
  # API call is made.
  flags.WarnForNodeModification(args, enable_autorepair)
  flags.ValidateSurgeUpgradeSettings(args)
  metadata = metadata_utils.ConstructMetadataDict(args.metadata,
                                                  args.metadata_from_file)
  return api_adapter.CreateNodePoolOptions(
      accelerators=args.accelerator,
      boot_disk_kms_key=args.boot_disk_kms_key,
      machine_type=args.machine_type,
      disk_size_gb=utils.BytesToGb(args.disk_size),
      scopes=args.scopes,
      node_version=args.node_version,
      num_nodes=args.num_nodes,
      local_ssd_count=args.local_ssd_count,
      tags=args.tags,
      threads_per_core=args.threads_per_core,
      node_labels=args.node_labels,
      node_taints=args.node_taints,
      enable_autoscaling=args.enable_autoscaling,
      max_nodes=args.max_nodes,
      min_cpu_platform=args.min_cpu_platform,
      min_nodes=args.min_nodes,
      image_type=args.image_type,
      image=args.image,
      image_project=args.image_project,
      image_family=args.image_family,
      preemptible=args.preemptible,
      enable_autorepair=enable_autorepair,
      enable_autoupgrade=cmd_util.GetAutoUpgrade(args),
      service_account=args.service_account,
      disk_type=args.disk_type,
      metadata=metadata,
      max_pods_per_node=args.max_pods_per_node,
      enable_autoprovisioning=args.enable_autoprovisioning,
      workload_metadata=args.workload_metadata,
      workload_metadata_from_node=args.workload_metadata_from_node,
      shielded_secure_boot=args.shielded_secure_boot,
      shielded_integrity_monitoring=args.shielded_integrity_monitoring,
      reservation_affinity=args.reservation_affinity,
      reservation=args.reservation,
      sandbox=args.sandbox,
      max_surge_upgrade=args.max_surge_upgrade,
      max_unavailable_upgrade=args.max_unavailable_upgrade,
      node_group=args.node_group,
      system_config_from_file=args.system_config_from_file,
      pod_ipv4_range=args.pod_ipv4_range,
      create_pod_ipv4_range=args.create_pod_ipv4_range,
      gvnic=args.enable_gvnic,
      enable_image_streaming=args.enable_image_streaming,
      spot=args.spot)
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Create(base.CreateCommand):
  """Create a node pool in a running cluster."""

  @staticmethod
  def Args(parser):
    """Registers the GA-track flags: the common set plus GA-only flags."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=False)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddLocalSSDFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableGvnicFlag(parser)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddSpotFlag(parser, for_node_pool=True, hidden=True)

  def ParseCreateNodePoolOptions(self, args):
    """Builds CreateNodePoolOptions from the parsed GA flags."""
    ops = ParseCreateNodePoolOptionsBase(args)
    ops.node_locations = args.node_locations
    return ops

  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Cluster message for the successfully created node pool.

    Raises:
      util.Error, if creation failed.
    """
    adapter = self.context['api_adapter']
    location_get = self.context['location_get']
    location = location_get(args)
    try:
      pool_ref = adapter.ParseNodePool(args.name, location)
      options = self.ParseCreateNodePoolOptions(args)
      # Print informational/lifecycle warnings before the API call.
      if options.accelerators is not None:
        log.status.Print(constants.KUBERNETES_GPU_LIMITATION_MSG)
      if not options.image_type:
        log.warning('Starting with version 1.19, newly created node-pools '
                    'will have COS_CONTAINERD as the default node image '
                    'when no image type is specified.')
      elif options.image_type.upper() == 'WINDOWS_SAC':
        log.warning(WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE)
      operation_ref = adapter.CreateNodePool(pool_ref, options)
      # Block until the server-side operation completes (or times out).
      adapter.WaitForOperation(
          operation_ref,
          'Creating node pool {0}'.format(pool_ref.nodePoolId),
          timeout_s=args.timeout)
      pool = adapter.GetNodePool(pool_ref)
    except apitools_exceptions.HttpError as error:
      raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
    log.CreatedResource(pool_ref)
    return [pool]
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class CreateBeta(Create):
  """Create a node pool in a running cluster."""

  @staticmethod
  def Args(parser):
    """Registers the beta-track flags: the common set plus beta-only flags."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddLocalSSDsBetaFlags(parser, for_node_pool=True)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser, use_mode=False)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableGcfsFlag(parser, for_node_pool=True)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
    flags.AddEnableGvnicFlag(parser)
    flags.AddSpotFlag(parser, for_node_pool=True)
    flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
    flags.AddEnableRollingUpdateFlag(parser)
    flags.AddEnableBlueGreenUpdateFlag(parser)
    flags.AddStandardRolloutPolicyFlag(parser)
    flags.AddNodePoolSoakDurationFlag(parser)
    flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)

  def ParseCreateNodePoolOptions(self, args):
    """Builds CreateNodePoolOptions: common options plus beta-only fields."""
    ops = ParseCreateNodePoolOptionsBase(args)
    flags.WarnForNodeVersionAutoUpgrade(args)
    flags.ValidateSurgeUpgradeSettings(args)
    ops.boot_disk_kms_key = args.boot_disk_kms_key
    ops.sandbox = args.sandbox
    ops.node_locations = args.node_locations
    ops.system_config_from_file = args.system_config_from_file
    ops.enable_gcfs = args.enable_gcfs
    ops.enable_image_streaming = args.enable_image_streaming
    ops.ephemeral_storage = args.ephemeral_storage
    ops.enable_private_nodes = args.enable_private_nodes
    ops.spot = args.spot
    ops.placement_type = args.placement_type
    ops.enable_blue_green_update = args.enable_blue_green_update
    ops.enable_rolling_update = args.enable_rolling_update
    ops.node_pool_soak_duration = args.node_pool_soak_duration
    ops.standard_rollout_policy = args.standard_rollout_policy
    ops.maintenance_interval = args.maintenance_interval
    return ops
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
  """Create a node pool in a running cluster."""

  def ParseCreateNodePoolOptions(self, args):
    """Builds CreateNodePoolOptions: common options plus alpha-only fields."""
    ops = ParseCreateNodePoolOptionsBase(args)
    flags.WarnForNodeVersionAutoUpgrade(args)
    flags.ValidateSurgeUpgradeSettings(args)
    ops.local_ssd_volume_configs = args.local_ssd_volumes
    ops.ephemeral_storage = args.ephemeral_storage
    ops.boot_disk_kms_key = args.boot_disk_kms_key
    ops.sandbox = args.sandbox
    ops.linux_sysctls = args.linux_sysctls
    ops.node_locations = args.node_locations
    ops.system_config_from_file = args.system_config_from_file
    ops.enable_gcfs = args.enable_gcfs
    ops.enable_image_streaming = args.enable_image_streaming
    ops.enable_private_nodes = args.enable_private_nodes
    ops.spot = args.spot
    ops.placement_type = args.placement_type
    ops.enable_blue_green_update = args.enable_blue_green_update
    ops.enable_rolling_update = args.enable_rolling_update
    ops.node_pool_soak_duration = args.node_pool_soak_duration
    ops.standard_rollout_policy = args.standard_rollout_policy
    ops.maintenance_interval = args.maintenance_interval
    return ops

  @staticmethod
  def Args(parser):
    """Registers the alpha-track flags: the common set plus alpha-only flags."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddLocalSSDsAlphaFlags(parser, for_node_pool=True)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser, use_mode=False)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddLinuxSysctlFlags(parser, for_node_pool=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddEnableGcfsFlag(parser, for_node_pool=True)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
    flags.AddEnableGvnicFlag(parser)
    flags.AddSpotFlag(parser, for_node_pool=True)
    flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
    flags.AddEnableRollingUpdateFlag(parser)
    flags.AddEnableBlueGreenUpdateFlag(parser)
    flags.AddStandardRolloutPolicyFlag(parser, for_node_pool=True)
    flags.AddNodePoolSoakDurationFlag(parser, for_node_pool=True)
    flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)
Create.detailed_help = DETAILED_HELP
| 41.52987 | 83 | 0.766965 |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.compute import metadata_utils
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.api_lib.container import api_adapter
from googlecloudsdk.api_lib.container import util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.container import constants
from googlecloudsdk.command_lib.container import container_command_util as cmd_util
from googlecloudsdk.command_lib.container import flags
from googlecloudsdk.core import log
DETAILED_HELP = {
'DESCRIPTION':
"""\
*{command}* facilitates the creation of a node pool in a Google
Kubernetes Engine cluster. A variety of options exists to customize the
node configuration and the number of nodes created.
""",
'EXAMPLES':
"""\
To create a new node pool "node-pool-1" with the default options in the
cluster "sample-cluster", run:
$ {command} node-pool-1 --cluster=sample-cluster
The new node pool will show up in the cluster after all the nodes have
been provisioned.
To create a node pool with 5 nodes, run:
$ {command} node-pool-1 --cluster=sample-cluster --num-nodes=5
""",
}
WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE = (
'Windows SAC node pools must be upgraded regularly to remain operational. '
'Please refer to https://cloud.google.com/kubernetes-engine/docs/how-to/'
'creating-a-cluster-windows#choose_your_windows_server_node_image for more '
'information.')
def _Args(parser):
  """Registers the flags shared by all release tracks of this command.

  Args:
    parser: The calliope/argparse parser that flags and the display format
      are added to.
  """
  flags.AddNodePoolNameArg(parser, 'The name of the node pool to create.')
  flags.AddNodePoolClusterFlag(parser, 'The cluster to add the node pool to.')
  # Hidden operation-wait timeout; the value is passed as `timeout_s` to
  # adapter.WaitForOperation in Run, so it is in seconds.
  parser.add_argument(
      '--timeout',
      type=int,
      default=1800,
      hidden=True,
      help='THIS ARGUMENT NEEDS HELP TEXT.')
  parser.add_argument(
      '--num-nodes',
      type=int,
      help='The number of nodes in the node pool in each of the '
      'cluster\'s zones.',
      default=3)
  flags.AddMachineTypeFlag(parser)
  parser.add_argument(
      '--disk-size',
      type=arg_parsers.BinarySize(lower_bound='10GB'),
      help='Size for node VM boot disks in GB. Defaults to 100GB.')
  flags.AddImageTypeFlag(parser, 'node pool')
  # Image/image-project/image-family are hidden escape hatches; the public
  # surface is --image-type.
  flags.AddImageFlag(parser, hidden=True)
  flags.AddImageProjectFlag(parser, hidden=True)
  flags.AddImageFamilyFlag(parser, hidden=True)
  flags.AddNodeLabelsFlag(parser, for_node_pool=True)
  flags.AddTagsFlag(
      parser, """\
Applies the given Compute Engine tags (comma separated) on all nodes in the new
node-pool. Example:
  $ {command} node-pool-1 --cluster=example-cluster --tags=tag1,tag2
New nodes, including ones created by resize or recreate, will have these tags
on the Compute Engine API instance object and can be used in firewall rules.
See https://cloud.google.com/sdk/gcloud/reference/compute/firewall-rules/create
for examples.
""")
  parser.display_info.AddFormat(util.NODEPOOLS_FORMAT)
  flags.AddNodeVersionFlag(parser)
  flags.AddDiskTypeFlag(parser)
  flags.AddMetadataFlags(parser)
  flags.AddShieldedInstanceFlags(parser)
  flags.AddNetworkConfigFlags(parser)
  flags.AddThreadsPerCore(parser)
def ParseCreateNodePoolOptionsBase(args):
  """Parses the flags common to all release tracks into an options object.

  Args:
    args: The parsed command-line arguments namespace.

  Returns:
    An api_adapter.CreateNodePoolOptions populated with the settings shared
    by the GA/beta/alpha tracks; track-specific fields are filled in by the
    per-track ParseCreateNodePoolOptions overrides.
  """
  enable_autorepair = cmd_util.GetAutoRepair(args)
  # Emit warnings / fail fast on inconsistent flags before building the
  # options object.
  flags.WarnForNodeModification(args, enable_autorepair)
  flags.ValidateSurgeUpgradeSettings(args)
  metadata = metadata_utils.ConstructMetadataDict(args.metadata,
                                                  args.metadata_from_file)
  return api_adapter.CreateNodePoolOptions(
      accelerators=args.accelerator,
      boot_disk_kms_key=args.boot_disk_kms_key,
      machine_type=args.machine_type,
      disk_size_gb=utils.BytesToGb(args.disk_size),
      scopes=args.scopes,
      node_version=args.node_version,
      num_nodes=args.num_nodes,
      local_ssd_count=args.local_ssd_count,
      tags=args.tags,
      threads_per_core=args.threads_per_core,
      node_labels=args.node_labels,
      node_taints=args.node_taints,
      enable_autoscaling=args.enable_autoscaling,
      max_nodes=args.max_nodes,
      min_cpu_platform=args.min_cpu_platform,
      min_nodes=args.min_nodes,
      image_type=args.image_type,
      image=args.image,
      image_project=args.image_project,
      image_family=args.image_family,
      preemptible=args.preemptible,
      enable_autorepair=enable_autorepair,
      enable_autoupgrade=cmd_util.GetAutoUpgrade(args),
      service_account=args.service_account,
      disk_type=args.disk_type,
      metadata=metadata,
      max_pods_per_node=args.max_pods_per_node,
      enable_autoprovisioning=args.enable_autoprovisioning,
      workload_metadata=args.workload_metadata,
      workload_metadata_from_node=args.workload_metadata_from_node,
      shielded_secure_boot=args.shielded_secure_boot,
      shielded_integrity_monitoring=args.shielded_integrity_monitoring,
      reservation_affinity=args.reservation_affinity,
      reservation=args.reservation,
      sandbox=args.sandbox,
      max_surge_upgrade=args.max_surge_upgrade,
      max_unavailable_upgrade=args.max_unavailable_upgrade,
      node_group=args.node_group,
      system_config_from_file=args.system_config_from_file,
      pod_ipv4_range=args.pod_ipv4_range,
      create_pod_ipv4_range=args.create_pod_ipv4_range,
      gvnic=args.enable_gvnic,
      enable_image_streaming=args.enable_image_streaming,
      spot=args.spot)
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Create(base.CreateCommand):
  # GA track of `container node-pools create`. Help text is attached via the
  # module-level `Create.detailed_help = DETAILED_HELP` assignment rather
  # than a class docstring.

  @staticmethod
  def Args(parser):
    """Registers GA-track flags on top of the common ones from _Args."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=False)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddLocalSSDFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableGvnicFlag(parser)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddSpotFlag(parser, for_node_pool=True, hidden=True)

  def ParseCreateNodePoolOptions(self, args):
    """Builds the options object for the GA track from parsed args."""
    ops = ParseCreateNodePoolOptionsBase(args)
    ops.node_locations = args.node_locations
    return ops

  def Run(self, args):
    """Creates the node pool and waits for the operation to complete.

    Args:
      args: The parsed command-line arguments namespace.

    Returns:
      A single-element list containing the created node pool, so the
      display framework renders it as a table row.

    Raises:
      exceptions.HttpException: on any HTTP error from the Container API.
    """
    adapter = self.context['api_adapter']
    location_get = self.context['location_get']
    location = location_get(args)
    try:
      pool_ref = adapter.ParseNodePool(args.name, location)
      options = self.ParseCreateNodePoolOptions(args)
      # Surface user-visible caveats before kicking off the long-running
      # create operation.
      if options.accelerators is not None:
        log.status.Print(constants.KUBERNETES_GPU_LIMITATION_MSG)
      if not options.image_type:
        log.warning('Starting with version 1.19, newly created node-pools '
                    'will have COS_CONTAINERD as the default node image '
                    'when no image type is specified.')
      elif options.image_type.upper() == 'WINDOWS_SAC':
        log.warning(WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE)
      operation_ref = adapter.CreateNodePool(pool_ref, options)
      # Blocks until the server-side operation finishes; --timeout (seconds)
      # bounds the wait.
      adapter.WaitForOperation(
          operation_ref,
          'Creating node pool {0}'.format(pool_ref.nodePoolId),
          timeout_s=args.timeout)
      pool = adapter.GetNodePool(pool_ref)
    except apitools_exceptions.HttpError as error:
      raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
    log.CreatedResource(pool_ref)
    return [pool]
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class CreateBeta(Create):
  # Beta track: inherits Run from Create; only flag registration and option
  # parsing differ.

  @staticmethod
  def Args(parser):
    """Registers beta-track flags on top of the common ones from _Args."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddLocalSSDsBetaFlags(parser, for_node_pool=True)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser, use_mode=False)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableGcfsFlag(parser, for_node_pool=True)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
    flags.AddEnableGvnicFlag(parser)
    flags.AddSpotFlag(parser, for_node_pool=True)
    flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
    flags.AddEnableRollingUpdateFlag(parser)
    flags.AddEnableBlueGreenUpdateFlag(parser)
    # NOTE(review): unlike CreateAlpha, these two calls omit
    # for_node_pool=True — confirm whether that difference is intentional.
    flags.AddStandardRolloutPolicyFlag(parser)
    flags.AddNodePoolSoakDurationFlag(parser)
    flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)

  def ParseCreateNodePoolOptions(self, args):
    """Builds the options object for the beta track from parsed args."""
    ops = ParseCreateNodePoolOptionsBase(args)
    flags.WarnForNodeVersionAutoUpgrade(args)
    flags.ValidateSurgeUpgradeSettings(args)
    # Beta-only fields layered on top of the base options.
    ops.boot_disk_kms_key = args.boot_disk_kms_key
    ops.sandbox = args.sandbox
    ops.node_locations = args.node_locations
    ops.system_config_from_file = args.system_config_from_file
    ops.enable_gcfs = args.enable_gcfs
    ops.enable_image_streaming = args.enable_image_streaming
    ops.ephemeral_storage = args.ephemeral_storage
    ops.enable_private_nodes = args.enable_private_nodes
    ops.spot = args.spot
    ops.placement_type = args.placement_type
    ops.enable_blue_green_update = args.enable_blue_green_update
    ops.enable_rolling_update = args.enable_rolling_update
    ops.node_pool_soak_duration = args.node_pool_soak_duration
    ops.standard_rollout_policy = args.standard_rollout_policy
    ops.maintenance_interval = args.maintenance_interval
    return ops
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
  # Alpha track: inherits Run from Create; only flag registration and option
  # parsing differ.

  def ParseCreateNodePoolOptions(self, args):
    """Builds the options object for the alpha track from parsed args."""
    ops = ParseCreateNodePoolOptionsBase(args)
    flags.WarnForNodeVersionAutoUpgrade(args)
    flags.ValidateSurgeUpgradeSettings(args)
    # Alpha-only fields layered on top of the base options.
    ops.local_ssd_volume_configs = args.local_ssd_volumes
    ops.ephemeral_storage = args.ephemeral_storage
    ops.boot_disk_kms_key = args.boot_disk_kms_key
    ops.sandbox = args.sandbox
    ops.linux_sysctls = args.linux_sysctls
    ops.node_locations = args.node_locations
    ops.system_config_from_file = args.system_config_from_file
    ops.enable_gcfs = args.enable_gcfs
    ops.enable_image_streaming = args.enable_image_streaming
    ops.enable_private_nodes = args.enable_private_nodes
    ops.spot = args.spot
    ops.placement_type = args.placement_type
    ops.enable_blue_green_update = args.enable_blue_green_update
    ops.enable_rolling_update = args.enable_rolling_update
    ops.node_pool_soak_duration = args.node_pool_soak_duration
    ops.standard_rollout_policy = args.standard_rollout_policy
    ops.maintenance_interval = args.maintenance_interval
    return ops

  @staticmethod
  def Args(parser):
    """Registers alpha-track flags on top of the common ones from _Args."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddLocalSSDsAlphaFlags(parser, for_node_pool=True)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser, use_mode=False)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddLinuxSysctlFlags(parser, for_node_pool=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddEnableGcfsFlag(parser, for_node_pool=True)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
    flags.AddEnableGvnicFlag(parser)
    flags.AddSpotFlag(parser, for_node_pool=True)
    flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
    flags.AddEnableRollingUpdateFlag(parser)
    flags.AddEnableBlueGreenUpdateFlag(parser)
    flags.AddStandardRolloutPolicyFlag(parser, for_node_pool=True)
    flags.AddNodePoolSoakDurationFlag(parser, for_node_pool=True)
    flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)
Create.detailed_help = DETAILED_HELP
| true | true |
f72fd770b3c890aabd12bd755ed60cdc88efa9e5 | 8,995 | py | Python | custom/m4change/reports/ld_hmis_report.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 471 | 2015-01-10T02:55:01.000Z | 2022-03-29T18:07:18.000Z | custom/m4change/reports/ld_hmis_report.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 14,354 | 2015-01-01T07:38:23.000Z | 2022-03-31T20:55:14.000Z | custom/m4change/reports/ld_hmis_report.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 175 | 2015-01-06T07:16:47.000Z | 2022-03-29T13:27:01.000Z | from django.utils.translation import ugettext as _
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn, NumericColumn
from corehq.apps.reports.filters.select import MonthFilter, YearFilter
from corehq.apps.reports.standard import MonthYearMixin
from corehq.apps.reports.standard.cases.basic import CaseListReport
from custom.common.filters import RestrictedAsyncLocationFilter
from custom.m4change.reports import validate_report_parameters, get_location_hierarchy_by_id
from custom.m4change.reports.reports import M4ChangeReport
from custom.m4change.reports.sql_data import LdHmisCaseSqlData
def _get_row(row_data, form_data, key):
data = form_data.get(key)
rows = dict([(row_key, data.get(row_key, 0)) for row_key in row_data])
for key in rows:
if rows.get(key) == None:
rows[key] = 0
return rows
@location_safe
class LdHmisReport(MonthYearMixin, CaseListReport, M4ChangeReport):
    """Facility Labour & Delivery HMIS report for the m4change project.

    Aggregates L&D indicator counts (HMIS codes 19-45.2) over the location
    hierarchy rooted at the selected location, for the selected month/year.
    """
    ajax_pagination = False
    asynchronous = True
    exportable = True
    emailable = False
    name = "Facility L&D HMIS Report"
    slug = "facility_ld_hmis_report"
    default_rows = 25
    base_template = "m4change/report.html"
    report_template_path = "m4change/report_content.html"
    fields = [
        RestrictedAsyncLocationFilter,
        MonthFilter,
        YearFilter
    ]

    @classmethod
    def get_report_data(cls, config):
        """Aggregate indicator values across the location hierarchy.

        Args:
            config: Dict with "domain", "location_id", "datespan" and "user".

        Returns:
            List of (row_key, row_dict) tuples sorted by HMIS code, where
            each row_dict has "hmis_code", "label" and the summed "value".
        """
        validate_report_parameters(["domain", "location_id", "datespan"], config)
        domain = config["domain"]
        location_id = config["location_id"]
        user = config["user"]
        sql_data = LdHmisCaseSqlData(domain=domain, datespan=config["datespan"]).data
        locations = get_location_hierarchy_by_id(location_id, domain, user)
        row_data = LdHmisReport.get_initial_row_data()
        # NOTE(review): the loop variables below shadow the outer
        # ``location_id`` and ``key`` names; behavior is correct, but
        # renaming them would aid readability.
        for location_id in locations:
            key = (domain, location_id)
            if key in sql_data:
                report_rows = _get_row(row_data, sql_data, key)
                for key in report_rows:
                    row_data.get(key)["value"] += report_rows.get(key)
        return sorted([(key, row_data[key]) for key in row_data], key=lambda t: t[1].get("hmis_code"))

    @classmethod
    def get_initial_row_data(cls):
        """Return a fresh row template: HMIS code and label per row, value 0."""
        return {
            "deliveries_total": {
                "hmis_code": 19, "label": _("Deliveries - Total"), "value": 0
            },
            "deliveries_svd_total": {
                "hmis_code": 20, "label": _("Deliveries - SVD"), "value": 0
            },
            "deliveries_assisted_total": {
                "hmis_code": 21, "label": _("Deliveries - Assisted"), "value": 0
            },
            "deliveries_caesarean_section_total": {
                "hmis_code": 22, "label": _("Deliveries caesarean section"), "value": 0
            },
            "deliveries_complications_total": {
                "hmis_code": 23, "label": _("Deliveries - Complications"), "value": 0
            },
            'deliveries_preterm_total': {
                "hmis_code": 24, "label": _("Deliveries - Preterm"), "value": 0
            },
            'deliveries_hiv_positive_women_total': {
                "hmis_code": 25, "label": _("Deliveries - HIV positive women"), "value": 0
            },
            'live_birth_hiv_positive_women_total': {
                "hmis_code": 26, "label": _("LiveBirth - HIV positive women"), "value": 0
            },
            'deliveries_hiv_positive_booked_women_total': {
                "hmis_code": 27, "label": _("Deliveries - HIV positive booked women"), "value": 0
            },
            'deliveries_hiv_positive_unbooked_women_total': {
                "hmis_code": 28, "label": _("Deliveries - HIV positive unbooked women"), "value": 0
            },
            'deliveries_monitored_using_partograph_total': {
                "hmis_code": 29, "label": _("Deliveries - Monitored using Partograph"), "value": 0
            },
            'deliveries_skilled_birth_attendant_total': {
                "hmis_code": 30, "label": _("Deliveries taken by skilled birth attendant"), "value": 0
            },
            'tt1_total': {
                "hmis_code": 31, "label": _("TT1"), "value": 0
            },
            'tt2_total': {
                "hmis_code": 32, "label": _("TT2"), "value": 0
            },
            'live_births_male_female_total': {
                "hmis_code": 36, "label": _("Live Births(Male, Female, < 2.5kg, >= 2.5k g)"), "value": 0
            },
            'male_lt_2_5kg_total': {
                "hmis_code": 36.1, "label": _("Male, < 2.5kg"), "value": 0
            },
            'male_gte_2_5kg_total': {
                "hmis_code": 36.2, "label": _("Male, >= 2.5kg"), "value": 0
            },
            'female_lt_2_5kg_total': {
                "hmis_code": 36.3, "label": _("Female, < 2.5kg"), "value": 0
            },
            'female_gte_2_5kg_total': {
                "hmis_code": 36.4, "label": _("Female, >= 2.5kg"), "value": 0
            },
            'still_births_total': {
                "hmis_code": 37, "label": _("Still Births total"), "value": 0
            },
            'fresh_still_births_total': {
                "hmis_code": 38.1, "label": _("Fresh Still Births"), "value": 0
            },
            'other_still_births_total': {
                "hmis_code": 38.2, "label": _("Other still Births"), "value": 0
            },
            'abortion_induced_total': {
                "hmis_code": 39.1, "label": _("Abortion Induced"), "value": 0
            },
            'other_abortions_total': {
                "hmis_code": 39.2, "label": _("Other Abortions"), "value": 0
            },
            'total_abortions_total': {
                "hmis_code": 40, "label": _("Total Abortions"), "value": 0
            },
            'birth_asphyxia_total': {
                "hmis_code": 41, "label": _("Birth Asphyxia - Total"), "value": 0
            },
            'birth_asphyxia_male_total': {
                "hmis_code": 41.1, "label": _("Birth Asphyxia - Male"), "value": 0
            },
            'birth_asphyxia_female_total': {
                "hmis_code": 41.2, "label": _("Birth Asphyxia - Female"), "value": 0
            },
            'neonatal_sepsis_total': {
                "hmis_code": 42, "label": _("Neonatal Sepsis - Total"), "value": 0
            },
            'neonatal_sepsis_male_total': {
                "hmis_code": 42.1, "label": _("Neonatal Sepsis - Male"), "value": 0
            },
            'neonatal_sepsis_female_total': {
                "hmis_code": 42.2, "label": _("Neonatal Sepsis - Female"), "value": 0
            },
            'neonatal_tetanus_total': {
                "hmis_code": 43, "label": _("Neonatal Tetanus - Total"), "value": 0
            },
            'neonatal_tetanus_male_total': {
                "hmis_code": 43.1, "label": _("Neonatal Tetanus - Male"), "value": 0
            },
            'neonatal_tetanus_female_total': {
                "hmis_code": 43.2, "label": _("Neonatal Tetanus - Female"), "value": 0
            },
            'neonatal_jaundice_total': {
                "hmis_code": 44, "label": _("Neonatal Jaundice - Total"), "value": 0
            },
            'neonatal_jaundice_male_total': {
                "hmis_code": 44.1, "label": _("Neonatal Jaundice - Male"), "value": 0
            },
            'neonatal_jaundice_female_total': {
                "hmis_code": 44.2, "label": _("Neonatal Jaundice - Female"), "value": 0
            },
            'low_birth_weight_babies_in_kmc_total': {
                "hmis_code": 45, "label": _("Low birth weight babies placed in KMC - Total"), "value": 0
            },
            'low_birth_weight_babies_in_kmc_male_total': {
                "hmis_code": 45.1, "label": _("Low birth weight babies placed in KMC - Male"), "value": 0
            },
            'low_birth_weight_babies_in_kmc_female_total': {
                "hmis_code": 45.2, "label": _("Low birth weight babies placed in KMC - Female"), "value": 0
            }
        }

    @property
    def headers(self):
        """Three-column table: HMIS code, indicator label, summed total."""
        headers = DataTablesHeader(NumericColumn(_("HMIS code")),
                                   DataTablesColumn(_("Data Point")),
                                   NumericColumn(_("Total")))
        return headers

    @property
    def rows(self):
        """Yield one table row per indicator, in HMIS-code order."""
        row_data = LdHmisReport.get_report_data({
            "location_id": self.request.GET.get("location_id", None),
            "datespan": self.datespan,
            "domain": str(self.domain),
            "user": self.request.couch_user
        })
        for row in row_data:
            yield [
                self.table_cell(row[1].get("hmis_code")),
                self.table_cell(row[1].get("label")),
                self.table_cell(row[1].get("value"))
            ]

    @property
    def rendered_report_title(self):
        return self.name
| 42.429245 | 107 | 0.545859 | from django.utils.translation import ugettext as _
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn, NumericColumn
from corehq.apps.reports.filters.select import MonthFilter, YearFilter
from corehq.apps.reports.standard import MonthYearMixin
from corehq.apps.reports.standard.cases.basic import CaseListReport
from custom.common.filters import RestrictedAsyncLocationFilter
from custom.m4change.reports import validate_report_parameters, get_location_hierarchy_by_id
from custom.m4change.reports.reports import M4ChangeReport
from custom.m4change.reports.sql_data import LdHmisCaseSqlData
def _get_row(row_data, form_data, key):
data = form_data.get(key)
rows = dict([(row_key, data.get(row_key, 0)) for row_key in row_data])
for key in rows:
if rows.get(key) == None:
rows[key] = 0
return rows
@location_safe
class LdHmisReport(MonthYearMixin, CaseListReport, M4ChangeReport):
ajax_pagination = False
asynchronous = True
exportable = True
emailable = False
name = "Facility L&D HMIS Report"
slug = "facility_ld_hmis_report"
default_rows = 25
base_template = "m4change/report.html"
report_template_path = "m4change/report_content.html"
fields = [
RestrictedAsyncLocationFilter,
MonthFilter,
YearFilter
]
@classmethod
def get_report_data(cls, config):
validate_report_parameters(["domain", "location_id", "datespan"], config)
domain = config["domain"]
location_id = config["location_id"]
user = config["user"]
sql_data = LdHmisCaseSqlData(domain=domain, datespan=config["datespan"]).data
locations = get_location_hierarchy_by_id(location_id, domain, user)
row_data = LdHmisReport.get_initial_row_data()
for location_id in locations:
key = (domain, location_id)
if key in sql_data:
report_rows = _get_row(row_data, sql_data, key)
for key in report_rows:
row_data.get(key)["value"] += report_rows.get(key)
return sorted([(key, row_data[key]) for key in row_data], key=lambda t: t[1].get("hmis_code"))
@classmethod
def get_initial_row_data(cls):
return {
"deliveries_total": {
"hmis_code": 19, "label": _("Deliveries - Total"), "value": 0
},
"deliveries_svd_total": {
"hmis_code": 20, "label": _("Deliveries - SVD"), "value": 0
},
"deliveries_assisted_total": {
"hmis_code": 21, "label": _("Deliveries - Assisted"), "value": 0
},
"deliveries_caesarean_section_total": {
"hmis_code": 22, "label": _("Deliveries caesarean section"), "value": 0
},
"deliveries_complications_total": {
"hmis_code": 23, "label": _("Deliveries - Complications"), "value": 0
},
'deliveries_preterm_total': {
"hmis_code": 24, "label": _("Deliveries - Preterm"), "value": 0
},
'deliveries_hiv_positive_women_total': {
"hmis_code": 25, "label": _("Deliveries - HIV positive women"), "value": 0
},
'live_birth_hiv_positive_women_total': {
"hmis_code": 26, "label": _("LiveBirth - HIV positive women"), "value": 0
},
'deliveries_hiv_positive_booked_women_total': {
"hmis_code": 27, "label": _("Deliveries - HIV positive booked women"), "value": 0
},
'deliveries_hiv_positive_unbooked_women_total': {
"hmis_code": 28, "label": _("Deliveries - HIV positive unbooked women"), "value": 0
},
'deliveries_monitored_using_partograph_total': {
"hmis_code": 29, "label": _("Deliveries - Monitored using Partograph"), "value": 0
},
'deliveries_skilled_birth_attendant_total': {
"hmis_code": 30, "label": _("Deliveries taken by skilled birth attendant"), "value": 0
},
'tt1_total': {
"hmis_code": 31, "label": _("TT1"), "value": 0
},
'tt2_total': {
"hmis_code": 32, "label": _("TT2"), "value": 0
},
'live_births_male_female_total': {
"hmis_code": 36, "label": _("Live Births(Male, Female, < 2.5kg, >= 2.5k g)"), "value": 0
},
'male_lt_2_5kg_total': {
"hmis_code": 36.1, "label": _("Male, < 2.5kg"), "value": 0
},
'male_gte_2_5kg_total': {
"hmis_code": 36.2, "label": _("Male, >= 2.5kg"), "value": 0
},
'female_lt_2_5kg_total': {
"hmis_code": 36.3, "label": _("Female, < 2.5kg"), "value": 0
},
'female_gte_2_5kg_total': {
"hmis_code": 36.4, "label": _("Female, >= 2.5kg"), "value": 0
},
'still_births_total': {
"hmis_code": 37, "label": _("Still Births total"), "value": 0
},
'fresh_still_births_total': {
"hmis_code": 38.1, "label": _("Fresh Still Births"), "value": 0
},
'other_still_births_total': {
"hmis_code": 38.2, "label": _("Other still Births"), "value": 0
},
'abortion_induced_total': {
"hmis_code": 39.1, "label": _("Abortion Induced"), "value": 0
},
'other_abortions_total': {
"hmis_code": 39.2, "label": _("Other Abortions"), "value": 0
},
'total_abortions_total': {
"hmis_code": 40, "label": _("Total Abortions"), "value": 0
},
'birth_asphyxia_total': {
"hmis_code": 41, "label": _("Birth Asphyxia - Total"), "value": 0
},
'birth_asphyxia_male_total': {
"hmis_code": 41.1, "label": _("Birth Asphyxia - Male"), "value": 0
},
'birth_asphyxia_female_total': {
"hmis_code": 41.2, "label": _("Birth Asphyxia - Female"), "value": 0
},
'neonatal_sepsis_total': {
"hmis_code": 42, "label": _("Neonatal Sepsis - Total"), "value": 0
},
'neonatal_sepsis_male_total': {
"hmis_code": 42.1, "label": _("Neonatal Sepsis - Male"), "value": 0
},
'neonatal_sepsis_female_total': {
"hmis_code": 42.2, "label": _("Neonatal Sepsis - Female"), "value": 0
},
'neonatal_tetanus_total': {
"hmis_code": 43, "label": _("Neonatal Tetanus - Total"), "value": 0
},
'neonatal_tetanus_male_total': {
"hmis_code": 43.1, "label": _("Neonatal Tetanus - Male"), "value": 0
},
'neonatal_tetanus_female_total': {
"hmis_code": 43.2, "label": _("Neonatal Tetanus - Female"), "value": 0
},
'neonatal_jaundice_total': {
"hmis_code": 44, "label": _("Neonatal Jaundice - Total"), "value": 0
},
'neonatal_jaundice_male_total': {
"hmis_code": 44.1, "label": _("Neonatal Jaundice - Male"), "value": 0
},
'neonatal_jaundice_female_total': {
"hmis_code": 44.2, "label": _("Neonatal Jaundice - Female"), "value": 0
},
'low_birth_weight_babies_in_kmc_total': {
"hmis_code": 45, "label": _("Low birth weight babies placed in KMC - Total"), "value": 0
},
'low_birth_weight_babies_in_kmc_male_total': {
"hmis_code": 45.1, "label": _("Low birth weight babies placed in KMC - Male"), "value": 0
},
'low_birth_weight_babies_in_kmc_female_total': {
"hmis_code": 45.2, "label": _("Low birth weight babies placed in KMC - Female"), "value": 0
}
}
@property
def headers(self):
headers = DataTablesHeader(NumericColumn(_("HMIS code")),
DataTablesColumn(_("Data Point")),
NumericColumn(_("Total")))
return headers
@property
def rows(self):
row_data = LdHmisReport.get_report_data({
"location_id": self.request.GET.get("location_id", None),
"datespan": self.datespan,
"domain": str(self.domain),
"user": self.request.couch_user
})
for row in row_data:
yield [
self.table_cell(row[1].get("hmis_code")),
self.table_cell(row[1].get("label")),
self.table_cell(row[1].get("value"))
]
@property
def rendered_report_title(self):
return self.name
| true | true |
f72fd7ec1ff8566fe5149edae2c9a1ef77dfb47b | 66 | py | Python | server.py | sigu1011/gameinn | 6c314fa5deefdc2780356900a4d6fa55317a18cd | [
"MIT"
] | null | null | null | server.py | sigu1011/gameinn | 6c314fa5deefdc2780356900a4d6fa55317a18cd | [
"MIT"
] | 1 | 2019-11-27T23:46:36.000Z | 2019-11-27T23:46:36.000Z | server.py | sigu1011/gameinn | 6c314fa5deefdc2780356900a4d6fa55317a18cd | [
"MIT"
] | null | null | null | from gameinn import app
# Entry point for direct execution (``python server.py``): start the app's
# built-in development server with default settings.
if __name__ == '__main__':
    app.run()
| 13.2 | 26 | 0.666667 | from gameinn import app
if __name__ == '__main__':
app.run()
| true | true |
f72fd8714765a1fe1b575242873790f455b95c4d | 3,480 | py | Python | text_features_extraction.py | maxgreat/dsve-loc | dd6807d02c0d5fd3e215be8e5c7a88e73102e561 | [
"BSD-3-Clause-Clear"
] | null | null | null | text_features_extraction.py | maxgreat/dsve-loc | dd6807d02c0d5fd3e215be8e5c7a88e73102e561 | [
"BSD-3-Clause-Clear"
] | null | null | null | text_features_extraction.py | maxgreat/dsve-loc | dd6807d02c0d5fd3e215be8e5c7a88e73102e561 | [
"BSD-3-Clause-Clear"
] | null | null | null | """
****************** COPYRIGHT AND CONFIDENTIALITY INFORMATION ******************
Copyright (c) 2018 [Thomson Licensing]
All Rights Reserved
This program contains proprietary information which is a trade secret/business \
secret of [Thomson Licensing] and is protected, even if unpublished, under \
applicable Copyright laws (including French droit d'auteur) and/or may be \
subject to one or more patent(s).
Recipient is to retain this program in confidence and is not permitted to use \
or make copies thereof other than as permitted in a written agreement with \
[Thomson Licensing] unless otherwise expressly allowed by applicable laws or \
by [Thomson Licensing] under express agreement.
Thomson Licensing is a company of the group TECHNICOLOR
*******************************************************************************
This scripts permits one to reproduce training and experiments of:
Engilberge, M., Chevallier, L., Pérez, P., & Cord, M. (2018, April).
Finding beans in burgers: Deep semantic-visual embedding with localization.
In Proceedings of CVPR (pp. 3984-3993)
Author: Martin Engilberge
"""
import argparse
import time
import numpy as np
import torch
from misc.dataset import TextDataset
from misc.model import joint_embedding
from misc.utils import save_obj, collate_fn_cap_padded
from torch.utils.data import DataLoader
device = torch.device("cuda")
# device = torch.device("cpu") # uncomment to run with cpu
if __name__ == '__main__':
    # CLI: load a trained joint-embedding model and embed every sentence of
    # a text file into the shared visual-semantic space, saving the stacked
    # embeddings to disk.
    parser = argparse.ArgumentParser(description='Extract embedding representation for images')
    parser.add_argument("-p", '--path', dest="model_path", help='Path to the weights of the model to evaluate', required=True)
    parser.add_argument("-d", '--data', dest="data_path", help='path to the file containing the sentence to embed')
    parser.add_argument("-o", '--output', dest="output_path", help='path of the output file', default="./text_embedding")
    parser.add_argument("-bs", "--batch_size", help="The size of the batches", type=int, default=1)
    args = parser.parse_args()
    print("Loading model from:", args.model_path)
    # map_location keeps the checkpoint tensors on CPU during load; the model
    # is moved to `device` afterwards.
    checkpoint = torch.load(args.model_path, map_location=lambda storage, loc: storage)
    join_emb = joint_embedding(checkpoint['args_dict'])
    join_emb.load_state_dict(checkpoint["state_dict"])
    # Inference only: freeze all parameters and switch to eval mode.
    for param in join_emb.parameters():
        param.requires_grad = False
    join_emb.to(device)
    join_emb.eval()
    dataset = TextDataset(args.data_path)
    print("Dataset size: ", len(dataset))
    # Captions are variable-length, so the collate function pads each batch.
    dataset_loader = DataLoader(dataset, batch_size=args.batch_size, num_workers=3, pin_memory=True, collate_fn=collate_fn_cap_padded)
    caps_enc = list()
    print("### Starting sentence embedding ###")
    end = time.time()
    for i, (caps, length) in enumerate(dataset_loader, 0):
        input_caps = caps.to(device)
        with torch.no_grad():
            # Text-only forward pass: the image input is None, so only the
            # caption embedding is produced.
            _, output_emb = join_emb(None, input_caps, length)
        caps_enc.append(output_emb.cpu().data.numpy())
        if i % 100 == 99:
            # NOTE(review): the elapsed time printed here spans the last 100
            # batches, not a single batch, despite the message wording.
            print(str((i + 1) * args.batch_size) + "/" + str(len(dataset)) + " captions encoded - Time per batch: " + str((time.time() - end)) + "s")
            end = time.time()
    print("Processing done -> saving")
    caps_stack = np.vstack(caps_enc)
    save_obj(caps_stack, args.output_path)
    print("The data has been save to ", args.output_path)
| 39.101124 | 150 | 0.675287 |
import argparse
import time
import numpy as np
import torch
from misc.dataset import TextDataset
from misc.model import joint_embedding
from misc.utils import save_obj, collate_fn_cap_padded
from torch.utils.data import DataLoader
device = torch.device("cuda")
_':
parser = argparse.ArgumentParser(description='Extract embedding representation for images')
parser.add_argument("-p", '--path', dest="model_path", help='Path to the weights of the model to evaluate', required=True)
parser.add_argument("-d", '--data', dest="data_path", help='path to the file containing the sentence to embed')
parser.add_argument("-o", '--output', dest="output_path", help='path of the output file', default="./text_embedding")
parser.add_argument("-bs", "--batch_size", help="The size of the batches", type=int, default=1)
args = parser.parse_args()
print("Loading model from:", args.model_path)
checkpoint = torch.load(args.model_path, map_location=lambda storage, loc: storage)
join_emb = joint_embedding(checkpoint['args_dict'])
join_emb.load_state_dict(checkpoint["state_dict"])
for param in join_emb.parameters():
param.requires_grad = False
join_emb.to(device)
join_emb.eval()
dataset = TextDataset(args.data_path)
print("Dataset size: ", len(dataset))
dataset_loader = DataLoader(dataset, batch_size=args.batch_size, num_workers=3, pin_memory=True, collate_fn=collate_fn_cap_padded)
caps_enc = list()
print("### Starting sentence embedding ###")
end = time.time()
for i, (caps, length) in enumerate(dataset_loader, 0):
input_caps = caps.to(device)
with torch.no_grad():
_, output_emb = join_emb(None, input_caps, length)
caps_enc.append(output_emb.cpu().data.numpy())
if i % 100 == 99:
print(str((i + 1) * args.batch_size) + "/" + str(len(dataset)) + " captions encoded - Time per batch: " + str((time.time() - end)) + "s")
end = time.time()
print("Processing done -> saving")
caps_stack = np.vstack(caps_enc)
save_obj(caps_stack, args.output_path)
print("The data has been save to ", args.output_path)
| true | true |
f72fd9650f220263368abc650314f11467ad9ad0 | 117 | py | Python | FRCScouting/Contact/urls.py | xNovax/FRCScouting.ca | caf2774e5854a7386eceb21e57b68c1f9c1f7d2d | [
"MIT"
] | 1 | 2019-06-13T03:07:15.000Z | 2019-06-13T03:07:15.000Z | FRCScouting/Contact/urls.py | xNovax/FRCScouting.ca | caf2774e5854a7386eceb21e57b68c1f9c1f7d2d | [
"MIT"
] | 8 | 2019-07-04T16:19:06.000Z | 2019-07-12T17:37:51.000Z | FRCScouting/Contact/urls.py | xNovax/FRCScouting.ca | caf2774e5854a7386eceb21e57b68c1f9c1f7d2d | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
# URL routes for the Contact app.
urlpatterns = [
    # Root of the app: renders the "contact us" page.
    path('', views.contactus, name='contactus'),
]
| 16.714286 | 48 | 0.692308 | from django.urls import path
from . import views
urlpatterns = [
path('', views.contactus, name= 'contactus')
]
| true | true |
f72fda32958488cb17ecc7633d36804837bdf534 | 7,499 | py | Python | flsim/utils/config_utils.py | karthikprasad/FLSim | 3c62fe83de2f06feffb9ed65ce9f71803bbd6027 | [
"Apache-2.0"
] | null | null | null | flsim/utils/config_utils.py | karthikprasad/FLSim | 3c62fe83de2f06feffb9ed65ce9f71803bbd6027 | [
"Apache-2.0"
] | null | null | null | flsim/utils/config_utils.py | karthikprasad/FLSim | 3c62fe83de2f06feffb9ed65ce9f71803bbd6027 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import collections
import collections.abc
import json
from typing import Any, Dict, List, Tuple, Type

from hydra.experimental import initialize, compose
from omegaconf import OmegaConf, DictConfig
def fullclassname(cls: Type[Any]) -> str:
    """Return the fully qualified name of ``cls`` as ``module.QualName``.

    The bare qualified name is returned when the class has no module, or
    when it lives in the legacy ``__builtin__`` module.
    """
    qualified = cls.__qualname__
    module = cls.__module__
    if module in (None, "__builtin__"):
        return qualified
    return f"{module}.{qualified}"
def _validate_cfg(component_class: Type[Any], cfg: Any):
"""
Validate that cfg doesn't have MISSING fields. This needs to be done only after
all defaults are set, typically in the base class.
We do this by making sure none of the parents have ``_set_defaults_in_cfg`` method.
"""
if not any(
hasattr(parent, "_set_defaults_in_cfg") for parent in component_class.__bases__
):
# looping over the config fields throws incase of missing field
for _ in cfg.items():
pass
def init_self_cfg(
    component_obj: Any,
    *,
    component_class: Type,
    config_class: Type,
    **kwargs,
):
    """Initialize an FL component's config and attach it as ``.cfg``.

    Reuses ``component_obj.cfg`` when one is already present, otherwise
    constructs ``config_class`` from ``kwargs``; normalizes to OmegaConf,
    lets ``component_class`` fill in its defaults, normalizes again, and
    validates that no field is left MISSING.
    """
    if hasattr(component_obj, "cfg"):
        cfg = component_obj.cfg
    else:
        cfg = config_class(**kwargs)
    # Normalize any structure (dataclass/dict/OmegaConf) into an OmegaConf node.
    cfg = OmegaConf.create(cfg)
    # Let this class set its default cfg params ...
    component_class._set_defaults_in_cfg(cfg)
    # ... then normalize once more, since defaults may introduce sub-structures.
    cfg = OmegaConf.create(cfg)  # pyre-ignore [6]
    _validate_cfg(component_class, cfg)
    component_obj.cfg = cfg
# trainer config utils for consuming hydra configs
def _flatten_dict(
d: collections.MutableMapping, parent_key="", sep="."
) -> Dict[str, str]:
"""
Changes json of style
```
{
"trainer" : {
"_base_": "base_sync_trainer",
"aggregator": {
"_base_": "base_fed_avg_with_lr_sync_aggregator",
"lr": 0.1
}
}
}
```
to
```
{
"trainer._base_": "base_sync_trainer",
"trainer.aggregator._base_": "base_fed_avg_with_lr_sync_aggregator",
"trainer.aggregator.lr": 0.1,
}
```
"""
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
# if value is not a dict and is mutable, extend the items and flatten again.
# > hacky way of preserving dict values by checking if key has _dict as suffix.
if not new_key.endswith("_dict") and isinstance(v, collections.MutableMapping):
items.extend(_flatten_dict(v, new_key, sep=sep).items())
else:
# check if a number needs to be retained as a string
# the repalce with one dot is needed to handle floats
if type(v) is str and v.replace(".", "", 1).isdigit():
v = f'"{v}"' # enclose it with quotes if so.
items.append((new_key, v))
return dict(items)
def _handle_values_for_overrides_list(v: Any) -> Any:
"""
Handle the special massaging of some values of JSON need to for it to be supplied
to Hydra's overrides list.
"""
# python's None --> cmd line null for override list
v = "null" if v is None else v
# if value is a dict, convert it to string to work with override list.
# dump twice to escape quotes correctly.
v = json.dumps(json.dumps(v)) if type(v) is dict else v
# escape = char in value when present
v = v.replace(r"=", r"\=") if type(v) is str else v
return v
def _hydra_merge_order(dotlist_entry: str) -> Tuple:
"""
The override list needs to be ordered as the last one wins in case of
duplicates: https://hydra.cc/docs/advanced/defaults_list#composition-order
This function arranges the list so that _base_ is at the top, and we
proceed with overrides from top to bottom.
"""
key = dotlist_entry.split("=")[0]
# presence of "@" => it is a _base_ override
default_list_item_indicator = key.count("@") # 1 if true, 0 otherwise
# level in hierarchy; based on number of "."
hierarchy_level = key.count(".")
# multiply by -1 to keep the default list items on top
return (-1 * default_list_item_indicator, hierarchy_level, dotlist_entry)
def fl_json_to_dotlist(
    json_config: Dict[str, Any], append_or_override: bool = True
) -> List[str]:
    """Convert a nested FLSim JSON config into Hydra's overrides list.

    Changes
    ```
    {
        "trainer._base_": "base_sync_trainer",
        "trainer.aggregator._base_": "base_fed_avg_with_lr_sync_aggregator",
        "trainer.aggregator.lr": 0.1,
    }
    ```
    to
    ```
    [
        "+trainer@trainer=base_sync_trainer",
        "+aggregator@trainer.aggregator=base_fed_avg_with_lr_sync_aggregator",
        "++trainer.aggregator.lr=0.1",
    ]
    ```
    The override list grammar for reference:
    https://hydra.cc/docs/advanced/override_grammar/basic
    """
    override_prefix = "++" if append_or_override else ""
    entries = []
    for key, value in _flatten_dict(json_config).items():
        if key.endswith("._base_"):
            # trainer.aggregator._base_ --> trainer.aggregator
            target = key.replace("._base_", "")
            # extract "aggregator" from "trainer.aggregator"
            group = target.split(".")[-1]
            # --> +aggregator@trainer.aggregator=<value>
            entries.append(f"+{group}@{target}={value}")
        else:
            value = _handle_values_for_overrides_list(value)
            entries.append(f"{override_prefix}{key}={value}")
    # Order so Hydra merges defaults first, then shallow-to-deep overrides.
    return sorted(entries, key=_hydra_merge_order)
def fl_config_from_json(
    json_config: Dict[str, Any], append_or_override: bool = True
) -> DictConfig:
    """Build a Hydra ``DictConfig`` from an FLSim config in JSON format.

    The JSON is translated into Hydra's overrides list and composed against
    an empty config.
    """
    overrides = fl_json_to_dotlist(json_config, append_or_override)
    with initialize(config_path=None):
        return compose(config_name=None, overrides=overrides)
def maybe_parse_json_config():
    """Return a Hydra config built from a ``--config-file`` JSON, if given.

    Handy for supplying a JSON config file on the command line (e.g. during
    a buck run). Returns ``None`` when no JSON file is specified, in which
    case Hydra is expected to construct the config from YAML instead. This
    shim can go away once FLSim entirely moves to YAML configs.
    """
    parser = argparse.ArgumentParser(description="Run training loop for FL example")
    parser.add_argument("--config-file", type=str, default=None, help="JSON config")
    known_args, _unknown = parser.parse_known_args()
    if known_args.config_file is None:
        # No JSON given: let Hydra handle config construction from YAML.
        return None
    with open(known_args.config_file, "r") as config_file:
        json_config = json.load(config_file)
    return fl_config_from_json(json_config["config"])
def is_target(config, cls):
    """Return True when ``config`` and ``cls`` carry the same ``_target_``."""
    return getattr(config, "_target_") == getattr(cls, "_target_")
| 34.557604 | 87 | 0.648887 |
import argparse
import collections
import json
from typing import Any, Dict, List, Tuple, Type
from hydra.experimental import initialize, compose
from omegaconf import OmegaConf, DictConfig
def fullclassname(cls: Type[Any]) -> str:
module = cls.__module__
name = cls.__qualname__
if module is not None and module != "__builtin__":
name = module + "." + name
return name
def _validate_cfg(component_class: Type[Any], cfg: Any):
if not any(
hasattr(parent, "_set_defaults_in_cfg") for parent in component_class.__bases__
):
for _ in cfg.items():
pass
def init_self_cfg(
component_obj: Any,
*,
component_class: Type,
config_class: Type,
**kwargs,
):
cfg = (
config_class(**kwargs)
if not hasattr(component_obj, "cfg")
else component_obj.cfg
)
cfg = OmegaConf.create(cfg)
component_class._set_defaults_in_cfg(cfg)
cfg = OmegaConf.create(cfg)
_validate_cfg(component_class, cfg)
component_obj.cfg = cfg
def _flatten_dict(
d: collections.MutableMapping, parent_key="", sep="."
) -> Dict[str, str]:
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if not new_key.endswith("_dict") and isinstance(v, collections.MutableMapping):
items.extend(_flatten_dict(v, new_key, sep=sep).items())
else:
if type(v) is str and v.replace(".", "", 1).isdigit():
v = f'"{v}"'
items.append((new_key, v))
return dict(items)
def _handle_values_for_overrides_list(v: Any) -> Any:
v = "null" if v is None else v
# if value is a dict, convert it to string to work with override list.
# dump twice to escape quotes correctly.
v = json.dumps(json.dumps(v)) if type(v) is dict else v
# escape = char in value when present
v = v.replace(r"=", r"\=") if type(v) is str else v
return v
def _hydra_merge_order(dotlist_entry: str) -> Tuple:
key = dotlist_entry.split("=")[0]
# presence of "@" => it is a _base_ override
default_list_item_indicator = key.count("@") # 1 if true, 0 otherwise
# level in hierarchy; based on number of "."
hierarchy_level = key.count(".")
# multiply by -1 to keep the default list items on top
return (-1 * default_list_item_indicator, hierarchy_level, dotlist_entry)
def fl_json_to_dotlist(
json_config: Dict[str, Any], append_or_override: bool = True
) -> List[str]:
dotlist_dict = _flatten_dict(json_config)
dotlist_list = []
for k, v in dotlist_dict.items():
if k.endswith("._base_"):
# trainer.aggregator._base_ --> trainer.aggregator
k = k.replace("._base_", "")
# extract aggregator from trainer.aggregator
config_group = k.split(".")[-1]
# trainer.aggregator --> +aggregator@trainer.aggregator
k = f"+{config_group}@{k}"
# +aggregator@trainer.aggregator=base_fed_avg_with_lr_sync_aggregator
dotlist_list.append(f"{k}={v}")
else:
v = _handle_values_for_overrides_list(v)
prefix = "++" if append_or_override else ""
dotlist_list.append(f"{prefix}{k}={v}")
sorted_dotlist_list = sorted(dotlist_list, key=_hydra_merge_order)
return sorted_dotlist_list
def fl_config_from_json(
json_config: Dict[str, Any], append_or_override: bool = True
) -> DictConfig:
with initialize(config_path=None):
cfg = compose(
config_name=None,
overrides=fl_json_to_dotlist(json_config, append_or_override),
)
return cfg
def maybe_parse_json_config():
cfg = None
parser = argparse.ArgumentParser(description="Run training loop for FL example")
parser.add_argument("--config-file", type=str, default=None, help="JSON config")
args, _ = parser.parse_known_args()
# if JSON config is specified, build a DictConfig
if args.config_file is not None:
with open(args.config_file, "r") as config_file:
json_config = json.load(config_file)
cfg = fl_config_from_json(json_config["config"])
# else: assume yaml config, and let hydra handle config construction
return cfg
def is_target(config, cls):
return config._target_ == cls._target_
| true | true |
f72fda7d11cd1da25e984d8313329f9d5e6cc36b | 12,611 | py | Python | py3.1/multiprocess/queues.py | geofft/multiprocess | d998ffea9e82d17662b12b94a236182e7fde46d5 | [
"BSD-3-Clause"
] | 356 | 2015-06-21T21:05:10.000Z | 2022-03-30T11:57:08.000Z | py3.1/multiprocess/queues.py | geofft/multiprocess | d998ffea9e82d17662b12b94a236182e7fde46d5 | [
"BSD-3-Clause"
] | 103 | 2015-06-22T01:44:14.000Z | 2022-03-01T03:44:25.000Z | py3.1/multiprocess/queues.py | geofft/multiprocess | d998ffea9e82d17662b12b94a236182e7fde46d5 | [
"BSD-3-Clause"
] | 72 | 2015-09-02T14:10:24.000Z | 2022-03-25T06:49:43.000Z | #
# Module implementing queues
#
# multiprocessing/queues.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
import sys
import os
import threading
import collections
import time
import atexit
import weakref
from queue import Empty, Full
try:
import _multiprocess as _multiprocessing
except ImportError:
import _multiprocessing
from multiprocess import Pipe
from multiprocess.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocess.util import debug, info, Finalize, register_after_fork
from multiprocess.forking import assert_spawning
#
# Queue type using a pipe, buffer and thread
#
class Queue(object):
    """Multi-producer, multi-consumer queue shared between processes.

    Implemented with a one-way pipe plus a local buffer: ``put()`` appends
    to an in-process deque and a background feeder thread drains that deque
    into the pipe's write end; ``get()`` reads from the pipe's receive end
    under ``_rlock``. A bounded semaphore enforces ``maxsize`` across
    processes.
    """
    def __init__(self, maxsize=0):
        # maxsize <= 0 means "unbounded": use the largest value the
        # semaphore implementation supports.
        if maxsize <= 0:
            maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
        self._maxsize = maxsize
        self._reader, self._writer = Pipe(duplex=False)
        self._rlock = Lock()  # serializes readers of the pipe
        self._opid = os.getpid()  # pid of the process that created the queue
        if sys.platform == 'win32':
            # Writes to a message-oriented win32 pipe are atomic, so no
            # write lock is needed (see _feed / SimpleQueue as well).
            self._wlock = None
        else:
            self._wlock = Lock()
        self._sem = BoundedSemaphore(maxsize)  # counts free slots
        self._after_fork()
        if sys.platform != 'win32':
            register_after_fork(self, Queue._after_fork)
    def __getstate__(self):
        # Queues may only be pickled while spawning a child process.
        assert_spawning(self)
        return (self._maxsize, self._reader, self._writer,
                self._rlock, self._wlock, self._sem, self._opid)
    def __setstate__(self, state):
        (self._maxsize, self._reader, self._writer,
         self._rlock, self._wlock, self._sem, self._opid) = state
        self._after_fork()
    def _after_fork(self):
        # (Re)create per-process state: the buffer and the feeder-thread
        # machinery must not be shared across a fork.
        debug('Queue._after_fork()')
        self._notempty = threading.Condition(threading.Lock())
        self._buffer = collections.deque()
        self._thread = None
        self._jointhread = None
        self._joincancelled = False
        self._closed = False
        self._close = None
        self._send = self._writer.send
        self._recv = self._reader.recv
        self._poll = self._reader.poll
    def put(self, obj, block=True, timeout=None):
        """Append obj to the local buffer; the feeder thread forwards it.

        Raises queue.Full when no slot frees up under the given
        block/timeout semantics.
        """
        assert not self._closed
        if not self._sem.acquire(block, timeout):
            raise Full
        self._notempty.acquire()
        try:
            if self._thread is None:
                self._start_thread()  # feeder thread is started lazily
            self._buffer.append(obj)
            self._notempty.notify()  # wake the feeder thread
        finally:
            self._notempty.release()
    def get(self, block=True, timeout=None):
        """Receive the next item from the pipe.

        Raises queue.Empty when non-blocking or when the timeout expires.
        """
        if block and timeout is None:
            self._rlock.acquire()
            try:
                res = self._recv()
                self._sem.release()  # free a slot for producers
                return res
            finally:
                self._rlock.release()
        else:
            if block:
                deadline = time.time() + timeout
            if not self._rlock.acquire(block, timeout):
                raise Empty
            try:
                # Poll for the time remaining (0.0 when non-blocking).
                if not self._poll(block and (deadline-time.time()) or 0.0):
                    raise Empty
                res = self._recv()
                self._sem.release()
                return res
            finally:
                self._rlock.release()
    def qsize(self):
        # Raises NotImplementedError on Mac OSX because of broken sem_getvalue()
        return self._maxsize - self._sem._semlock._get_value()
    def empty(self):
        return not self._poll()
    def full(self):
        return self._sem._semlock._is_zero()
    def get_nowait(self):
        return self.get(False)
    def put_nowait(self, obj):
        return self.put(obj, False)
    def close(self):
        # Close the read end; the write end is closed by the feeder thread
        # via the _close finalizer (set up in _start_thread).
        self._closed = True
        self._reader.close()
        if self._close:
            self._close()
    def join_thread(self):
        """Wait for the feeder thread to flush; only valid after close()."""
        debug('Queue.join_thread()')
        assert self._closed
        if self._jointhread:
            self._jointhread()
    def cancel_join_thread(self):
        """Prevent process exit from blocking on the feeder thread."""
        debug('Queue.cancel_join_thread()')
        self._joincancelled = True
        try:
            self._jointhread.cancel()
        except AttributeError:
            # The feeder thread (and hence its finalizer) was never started.
            pass
    def _start_thread(self):
        debug('Queue._start_thread()')
        # Start thread which transfers data from buffer to pipe
        self._buffer.clear()
        self._thread = threading.Thread(
            target=Queue._feed,
            args=(self._buffer, self._notempty, self._send,
                  self._wlock, self._writer.close),
            name='QueueFeederThread'
            )
        self._thread.daemon = True
        debug('doing self._thread.start()')
        self._thread.start()
        debug('... done self._thread.start()')
        # On process exit we will wait for data to be flushed to pipe.
        #
        # However, if this process created the queue then all
        # processes which use the queue will be descendants of this
        # process. Therefore waiting for the queue to be flushed
        # is pointless once all the child processes have been joined.
        created_by_this_process = (self._opid == os.getpid())
        if not self._joincancelled and not created_by_this_process:
            self._jointhread = Finalize(
                self._thread, Queue._finalize_join,
                [weakref.ref(self._thread)],
                exitpriority=-5
                )
        # Send sentinel to the thread queue object when garbage collected
        self._close = Finalize(
            self, Queue._finalize_close,
            [self._buffer, self._notempty],
            exitpriority=10
            )
    @staticmethod
    def _finalize_join(twr):
        # twr is a weakref to the feeder thread; join it if still alive.
        debug('joining queue thread')
        thread = twr()
        if thread is not None:
            thread.join()
            debug('... queue thread joined')
        else:
            debug('... queue thread already dead')
    @staticmethod
    def _finalize_close(buffer, notempty):
        # Wake the feeder thread with the sentinel so it closes the pipe's
        # write end and exits.
        debug('telling queue thread to quit')
        notempty.acquire()
        try:
            buffer.append(_sentinel)
            notempty.notify()
        finally:
            notempty.release()
    @staticmethod
    def _feed(buffer, notempty, send, writelock, close):
        """Feeder-thread main loop: drain ``buffer`` into the pipe until the
        sentinel is seen."""
        debug('starting thread to feed data to pipe')
        from .util import is_exiting
        # Bind methods to locals: faster lookups inside the hot loop.
        nacquire = notempty.acquire
        nrelease = notempty.release
        nwait = notempty.wait
        bpopleft = buffer.popleft
        sentinel = _sentinel
        if sys.platform != 'win32':
            wacquire = writelock.acquire
            wrelease = writelock.release
        else:
            # win32 pipe writes are atomic; no lock needed.
            wacquire = None
        try:
            while 1:
                nacquire()
                try:
                    if not buffer:
                        nwait()  # sleep until put() notifies
                finally:
                    nrelease()
                try:
                    while 1:
                        obj = bpopleft()
                        if obj is sentinel:
                            debug('feeder thread got sentinel -- exiting')
                            close()
                            return
                        if wacquire is None:
                            send(obj)
                        else:
                            wacquire()
                            try:
                                send(obj)
                            finally:
                                wrelease()
                except IndexError:
                    # Buffer drained; go back to waiting on the condition.
                    pass
        except Exception as e:
            # Since this runs in a daemon thread the resources it uses
            # may become unusable while the process is cleaning up.
            # We ignore errors which happen after the process has
            # started to cleanup.
            try:
                if is_exiting():
                    info('error in queue thread: %s', e)
                else:
                    import traceback
                    traceback.print_exc()
            except Exception:
                pass
_sentinel = object()
#
# A queue type which also supports join() and task_done() methods
#
# Note that if you do not call task_done() for each finished task then
# eventually the counter's semaphore may overflow causing Bad Things
# to happen.
#
class JoinableQueue(Queue):
    """Queue subclass that also supports join() and task_done().

    Note that if task_done() is not called for each finished task then
    eventually the counter's semaphore may overflow, causing Bad Things
    to happen.
    """
    def __init__(self, maxsize=0):
        Queue.__init__(self, maxsize)
        self._unfinished_tasks = Semaphore(0)  # items put but not yet task_done()'d
        self._cond = Condition()  # signalled when the count drops to zero
    def __getstate__(self):
        return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
    def __setstate__(self, state):
        Queue.__setstate__(self, state[:-2])
        self._cond, self._unfinished_tasks = state[-2:]
    def put(self, obj, block=True, timeout=None):
        # Same as Queue.put(), but additionally bumps the unfinished-task
        # count while holding self._cond.
        assert not self._closed
        if not self._sem.acquire(block, timeout):
            raise Full
        self._notempty.acquire()
        self._cond.acquire()
        try:
            if self._thread is None:
                self._start_thread()
            self._buffer.append(obj)
            self._unfinished_tasks.release()
            self._notempty.notify()
        finally:
            self._cond.release()
            self._notempty.release()
    def task_done(self):
        """Mark one previously-got task as finished.

        Raises ValueError if called more times than there were items put.
        """
        self._cond.acquire()
        try:
            if not self._unfinished_tasks.acquire(False):
                raise ValueError('task_done() called too many times')
            if self._unfinished_tasks._semlock._is_zero():
                # All tasks done: wake every process blocked in join().
                self._cond.notify_all()
        finally:
            self._cond.release()
    def join(self):
        """Block until every item ever put has been marked with task_done()."""
        self._cond.acquire()
        try:
            if not self._unfinished_tasks._semlock._is_zero():
                self._cond.wait()
        finally:
            self._cond.release()
#
# Simplified Queue type -- really just a locked pipe
#
class SimpleQueue(object):
    """Simplified queue type -- really just a locked pipe."""
    def __init__(self):
        self._reader, self._writer = Pipe(duplex=False)
        self._rlock = Lock()  # serializes readers
        if sys.platform == 'win32':
            # Writes to a message-oriented win32 pipe are atomic, so no
            # write lock is needed.
            self._wlock = None
        else:
            self._wlock = Lock()
        self._make_methods()
    def empty(self):
        return not self._reader.poll()
    def __getstate__(self):
        # Only picklable while spawning a child process.
        assert_spawning(self)
        return (self._reader, self._writer, self._rlock, self._wlock)
    def __setstate__(self, state):
        (self._reader, self._writer, self._rlock, self._wlock) = state
        self._make_methods()
    def _make_methods(self):
        # Build get()/put() as closures over the pipe ends and locks so the
        # hot path avoids repeated attribute lookups.
        recv = self._reader.recv
        racquire, rrelease = self._rlock.acquire, self._rlock.release
        def get():
            racquire()
            try:
                return recv()
            finally:
                rrelease()
        self.get = get
        if self._wlock is None:
            # writes to a message oriented win32 pipe are atomic
            self.put = self._writer.send
        else:
            send = self._writer.send
            wacquire, wrelease = self._wlock.acquire, self._wlock.release
            def put(obj):
                wacquire()
                try:
                    return send(obj)
                finally:
                    wrelease()
            self.put = put
| 31.606516 | 81 | 0.581714 |
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
import sys
import os
import threading
import collections
import time
import atexit
import weakref
from queue import Empty, Full
try:
import _multiprocess as _multiprocessing
except ImportError:
import _multiprocessing
from multiprocess import Pipe
from multiprocess.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocess.util import debug, info, Finalize, register_after_fork
from multiprocess.forking import assert_spawning
class Queue(object):
def __init__(self, maxsize=0):
if maxsize <= 0:
maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
self._maxsize = maxsize
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
self._opid = os.getpid()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._sem = BoundedSemaphore(maxsize)
self._after_fork()
if sys.platform != 'win32':
register_after_fork(self, Queue._after_fork)
def __getstate__(self):
assert_spawning(self)
return (self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid)
def __setstate__(self, state):
(self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
self._after_fork()
def _after_fork(self):
debug('Queue._after_fork()')
self._notempty = threading.Condition(threading.Lock())
self._buffer = collections.deque()
self._thread = None
self._jointhread = None
self._joincancelled = False
self._closed = False
self._close = None
self._send = self._writer.send
self._recv = self._reader.recv
self._poll = self._reader.poll
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
self._notempty.acquire()
try:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._notempty.notify()
finally:
self._notempty.release()
def get(self, block=True, timeout=None):
if block and timeout is None:
self._rlock.acquire()
try:
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
else:
if block:
deadline = time.time() + timeout
if not self._rlock.acquire(block, timeout):
raise Empty
try:
if not self._poll(block and (deadline-time.time()) or 0.0):
raise Empty
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
def qsize(self):
return self._maxsize - self._sem._semlock._get_value()
def empty(self):
return not self._poll()
def full(self):
return self._sem._semlock._is_zero()
def get_nowait(self):
return self.get(False)
def put_nowait(self, obj):
return self.put(obj, False)
def close(self):
self._closed = True
self._reader.close()
if self._close:
self._close()
def join_thread(self):
debug('Queue.join_thread()')
assert self._closed
if self._jointhread:
self._jointhread()
def cancel_join_thread(self):
debug('Queue.cancel_join_thread()')
self._joincancelled = True
try:
self._jointhread.cancel()
except AttributeError:
pass
def _start_thread(self):
debug('Queue._start_thread()')
self._buffer.clear()
self._thread = threading.Thread(
target=Queue._feed,
args=(self._buffer, self._notempty, self._send,
self._wlock, self._writer.close),
name='QueueFeederThread'
)
self._thread.daemon = True
debug('doing self._thread.start()')
self._thread.start()
debug('... done self._thread.start()')
created_by_this_process = (self._opid == os.getpid())
if not self._joincancelled and not created_by_this_process:
self._jointhread = Finalize(
self._thread, Queue._finalize_join,
[weakref.ref(self._thread)],
exitpriority=-5
)
self._close = Finalize(
self, Queue._finalize_close,
[self._buffer, self._notempty],
exitpriority=10
)
@staticmethod
def _finalize_join(twr):
debug('joining queue thread')
thread = twr()
if thread is not None:
thread.join()
debug('... queue thread joined')
else:
debug('... queue thread already dead')
@staticmethod
def _finalize_close(buffer, notempty):
debug('telling queue thread to quit')
notempty.acquire()
try:
buffer.append(_sentinel)
notempty.notify()
finally:
notempty.release()
@staticmethod
def _feed(buffer, notempty, send, writelock, close):
debug('starting thread to feed data to pipe')
from .util import is_exiting
nacquire = notempty.acquire
nrelease = notempty.release
nwait = notempty.wait
bpopleft = buffer.popleft
sentinel = _sentinel
if sys.platform != 'win32':
wacquire = writelock.acquire
wrelease = writelock.release
else:
wacquire = None
try:
while 1:
nacquire()
try:
if not buffer:
nwait()
finally:
nrelease()
try:
while 1:
obj = bpopleft()
if obj is sentinel:
debug('feeder thread got sentinel -- exiting')
close()
return
if wacquire is None:
send(obj)
else:
wacquire()
try:
send(obj)
finally:
wrelease()
except IndexError:
pass
except Exception as e:
try:
if is_exiting():
info('error in queue thread: %s', e)
else:
import traceback
traceback.print_exc()
except Exception:
pass
_sentinel = object()
# to happen.
#
class JoinableQueue(Queue):
def __init__(self, maxsize=0):
Queue.__init__(self, maxsize)
self._unfinished_tasks = Semaphore(0)
self._cond = Condition()
def __getstate__(self):
return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
def __setstate__(self, state):
Queue.__setstate__(self, state[:-2])
self._cond, self._unfinished_tasks = state[-2:]
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
self._notempty.acquire()
self._cond.acquire()
try:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._unfinished_tasks.release()
self._notempty.notify()
finally:
self._cond.release()
self._notempty.release()
def task_done(self):
self._cond.acquire()
try:
if not self._unfinished_tasks.acquire(False):
raise ValueError('task_done() called too many times')
if self._unfinished_tasks._semlock._is_zero():
self._cond.notify_all()
finally:
self._cond.release()
def join(self):
self._cond.acquire()
try:
if not self._unfinished_tasks._semlock._is_zero():
self._cond.wait()
finally:
self._cond.release()
#
# Simplified Queue type -- really just a locked pipe
#
class SimpleQueue(object):
def __init__(self):
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._make_methods()
def empty(self):
return not self._reader.poll()
def __getstate__(self):
assert_spawning(self)
return (self._reader, self._writer, self._rlock, self._wlock)
def __setstate__(self, state):
(self._reader, self._writer, self._rlock, self._wlock) = state
self._make_methods()
def _make_methods(self):
recv = self._reader.recv
racquire, rrelease = self._rlock.acquire, self._rlock.release
def get():
racquire()
try:
return recv()
finally:
rrelease()
self.get = get
if self._wlock is None:
# writes to a message oriented win32 pipe are atomic
self.put = self._writer.send
else:
send = self._writer.send
wacquire, wrelease = self._wlock.acquire, self._wlock.release
def put(obj):
wacquire()
try:
return send(obj)
finally:
wrelease()
self.put = put
| true | true |
f72fdb09c91a65da5dcb94cfe00e07d00f7cf5cf | 3,669 | py | Python | python/oneflow/test/modules/test_chunk.py | triple-Mu/oneflow | 395da40885016d0b899f8a1eb87e5311a556a9b8 | [
"Apache-2.0"
] | 1 | 2022-03-14T11:17:56.000Z | 2022-03-14T11:17:56.000Z | python/oneflow/test/modules/test_chunk.py | triple-Mu/oneflow | 395da40885016d0b899f8a1eb87e5311a556a9b8 | [
"Apache-2.0"
] | null | null | null | python/oneflow/test/modules/test_chunk.py | triple-Mu/oneflow | 395da40885016d0b899f8a1eb87e5311a556a9b8 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
from random import shuffle
import numpy as np
from random import shuffle
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@flow.unittest.skip_unless_1n1d()
class TestChunk(flow.unittest.TestCase):
    """Autotest suite for ``flow.chunk``.

    Each test builds a random tensor, chunks it with random ``chunks``/``dim``
    arguments, and re-assembles the pieces with ``torch.cat`` so the autotest
    framework can compare whole tensors against PyTorch.
    """

    @autotest(n=5, check_graph=True)
    def test_flow_chunk_list_with_random_data(test_case):
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
        z = torch.cat(y, dim=dim)
        return z

    # BUG FIX: this test previously reused the name
    # ``test_flow_chunk_list_with_random_data``, silently shadowing (and
    # disabling) the test above. Renamed so both tests run.
    @autotest(n=10)
    def test_flow_chunk_list_with_permuted_random_data(test_case):
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        permute_list = [0, 1, 2, 3]
        shuffle(permute_list)
        # Chunk a permuted (potentially non-contiguous) view.
        y = x.permute(permute_list)
        z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
        return torch.cat(z, dim=dim)

    @autotest(n=5, auto_backward=False, check_graph=True)
    def test_flow_chunk_list_with_stride(test_case):
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        perm = [0, 1, 2, 3]
        shuffle(perm)
        # Permuting changes the strides without copying the data.
        y = x.permute(perm)
        z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
        return torch.cat(z, dim=dim)

    @autotest(n=5, auto_backward=False, check_graph=True)
    def test_flow_chunk_list_bool_with_random_data(test_case):
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device, torch.bool)
        y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
        z = torch.cat(y, dim=dim)
        return z

    @autotest(n=5, check_graph=True)
    def test_flow_chunk_list_with_random_data_negative_dim(test_case):
        device = random_device()
        # NOTE(review): ``dim`` is computed but unused -- chunking is always
        # along dim=-1 in this test; presumably intentional, verify.
        dim = random(1, 3).to(int)
        x = random_tensor(
            ndim=4,
            dim0=random(low=4, high=8).to(int),
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        y = torch.chunk(x, chunks=4, dim=-1)
        z = torch.cat(y, dim=-1)
        return z
if __name__ == "__main__":
    # Allow running this test module directly (outside a pytest/buck runner).
    unittest.main()
| 33.354545 | 73 | 0.613791 |
import unittest
from collections import OrderedDict
from random import shuffle
import numpy as np
from random import shuffle
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@flow.unittest.skip_unless_1n1d()
class TestChunk(flow.unittest.TestCase):
    """Autotest suite for chunk/cat round-trips on random tensors."""

    @autotest(n=5, check_graph=True)
    def test_flow_chunk_list_with_random_data(test_case):
        # Split a random contiguous 4-D tensor and concatenate it back.
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
        z = torch.cat(y, dim=dim)
        return z

    # BUG FIX: this method was also named test_flow_chunk_list_with_random_data,
    # which redefined — and silently disabled — the test above. Renamed so
    # unittest collects both tests.
    @autotest(n=10)
    def test_flow_chunk_list_with_random_data_permute(test_case):
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        permute_list = [0, 1, 2, 3]
        shuffle(permute_list)
        y = x.permute(permute_list)
        z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
        return torch.cat(z, dim=dim)

    @autotest(n=5, auto_backward=False, check_graph=True)
    def test_flow_chunk_list_with_stride(test_case):
        # Permute first so chunk operates on a non-contiguous input.
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        perm = [0, 1, 2, 3]
        shuffle(perm)
        y = x.permute(perm)
        z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
        return torch.cat(z, dim=dim)

    @autotest(n=5, auto_backward=False, check_graph=True)
    def test_flow_chunk_list_bool_with_random_data(test_case):
        # Boolean dtype round-trip; backward is disabled for bool tensors.
        device = random_device()
        dim = random(1, 4).to(int)
        x = random_tensor(
            ndim=4,
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device, torch.bool)
        y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
        z = torch.cat(y, dim=dim)
        return z

    @autotest(n=5, check_graph=True)
    def test_flow_chunk_list_with_random_data_negative_dim(test_case):
        device = random_device()
        # NOTE(review): `dim` is sampled but unused (dim=-1 is hard-coded
        # below); kept to preserve the autotest sampling sequence.
        dim = random(1, 3).to(int)
        x = random_tensor(
            ndim=4,
            dim0=random(low=4, high=8).to(int),
            dim1=random(low=4, high=8).to(int),
            dim2=random(low=4, high=8).to(int),
            dim3=random(low=4, high=8).to(int),
        ).to(device)
        y = torch.chunk(x, chunks=4, dim=-1)
        z = torch.cat(y, dim=-1)
        return z
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| true | true |
f72fdba810e4acacfce8c3f39354b4ef1f6e88b2 | 2,774 | py | Python | src/application/analysis/english_analysis.py | jagoPG/-restaurant-ml-inspector | 4efc7855401cc8cfa9d5e470c14685158a607448 | [
"Apache-2.0"
] | 1 | 2018-07-10T12:53:35.000Z | 2018-07-10T12:53:35.000Z | src/application/analysis/english_analysis.py | jagoPG/-restaurant-ml-inspector | 4efc7855401cc8cfa9d5e470c14685158a607448 | [
"Apache-2.0"
] | null | null | null | src/application/analysis/english_analysis.py | jagoPG/-restaurant-ml-inspector | 4efc7855401cc8cfa9d5e470c14685158a607448 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env
# -*- coding: utf-8 -*-
"""
Copyright 2017-2018 Jagoba Pérez-Gómez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from textblob import TextBlob
from src.application.analysis.evaluated_word import EvaluatedWord
class EnglishAnalysis(object):
    """
    Sentiment analysis over a collection of English reviews.

    Sentence polarities are folded into a running global score (``karma``)
    and every occurrence of a known keyword is recorded as an
    ``EvaluatedWord`` in the results dictionary.
    """

    def __init__(self, keyword_repository, reviews):
        self._keyword_repository = keyword_repository
        self._reviews = reviews
        self._words = {}
        self._karma = 0

    def analyse(self):
        """Run the analysis over every configured review."""
        for current_review in self._reviews:
            self.__process_english_review(current_review)

    def get_results(self):
        """Return the per-keyword analysis results.

        :returns: dict mapping keyword to EvaluatedWord
        """
        return self._words

    def get_karma(self):
        """Return the accumulated global score of all reviews.

        :returns: float
        """
        return self._karma

    def __process_english_review(self, review):
        blob = TextBlob(review.review_body)
        # First pass: fold each sentence polarity into the global score.
        for sent in blob.sentences:
            logging.debug('Polarity: %s' % sent.sentiment.polarity)
            self._karma = (sent.sentiment.polarity + self._karma) / 2
        # Second pass: attribute each sentence's polarity to its keywords.
        for sent in blob.sentences:
            for token in sent.split(' '):
                logging.debug('Word: %s' % token)
                self.__process_word(token, sent.sentiment.polarity, review.reference)

    def __process_word(self, word, karma, review_id):
        key = word.lower()
        if not self.__is_keyword(key):
            return
        entry = self._words.get(key)
        if entry is not None:
            entry.add_karma(karma)
            if review_id not in entry.appearances:
                entry.add_appearance(review_id)
        else:
            entry = EvaluatedWord(key, karma, [review_id])
            self._words[entry.word] = entry

    def __is_keyword(self, word):
        return self._keyword_repository.get_of_name(word, 'en') is not None
| 32.255814 | 96 | 0.655732 |
import logging
from textblob import TextBlob
from src.application.analysis.evaluated_word import EvaluatedWord
class EnglishAnalysis(object):
    """
    Analyse an array of English reviews.

    Per-keyword results (matching the keyword repository) are kept in
    ``_words`` and a global score of all reviews is accumulated in
    ``_karma``.
    """
    def __init__(self, keyword_repository, reviews):
        self._keyword_repository = keyword_repository
        self._reviews = reviews
        self._words = {}
        self._karma = 0
    def analyse(self):
        """Analyse the reviews that have been set."""
        for review in self._reviews:
            self.__process_english_review(review)
    def get_results(self):
        """Return the per-word analysis results (dict of EvaluatedWord)."""
        return self._words
    def get_karma(self):
        """Return the global score of all reviews."""
        return self._karma
    def __process_english_review(self, review):
        """Score one review: update karma per sentence, then score keywords."""
        body = TextBlob(review.review_body)
        for sentence in body.sentences:
            logging.debug('Polarity: %s' % sentence.sentiment.polarity)
            # Fold each sentence polarity into the running global score.
            self._karma = (sentence.sentiment.polarity + self._karma) / 2
        for sentence in body.sentences:
            for smaller_word in sentence.split(' '):
                logging.debug('Word: %s' % smaller_word)
                self.__process_word(smaller_word, sentence.sentiment.polarity, review.reference)
    def __process_word(self, word, karma, review_id):
        """Record `karma` for `word` if it is a known keyword."""
        word = word.lower()
        if not self.__is_keyword(word):
            return
        if word in self._words:
            word = self._words[word]
            word.add_karma(karma)
            if review_id not in word.appearances:
                word.add_appearance(review_id)
        else:
            word = EvaluatedWord(word, karma, [review_id])
            self._words[word.word] = word
    def __is_keyword(self, word):
        """Return True when the English keyword repository knows `word`."""
        return self._keyword_repository.get_of_name(word, 'en') is not None
| true | true |
f72fdc1d6884bbc99ff86fadd0864d05af6b34ab | 2,186 | py | Python | method_of_moments/continuous/_loc_scale.py | AlbertFarkhutdinov/method_of_moments | 0a69c63197d7f88a3b57356620b4d84e76543177 | [
"MIT"
] | null | null | null | method_of_moments/continuous/_loc_scale.py | AlbertFarkhutdinov/method_of_moments | 0a69c63197d7f88a3b57356620b4d84e76543177 | [
"MIT"
] | null | null | null | method_of_moments/continuous/_loc_scale.py | AlbertFarkhutdinov/method_of_moments | 0a69c63197d7f88a3b57356620b4d84e76543177 | [
"MIT"
] | null | null | null | """
This module contains description of class for probability distributions
from location-scale family.
"""
from method_of_moments.continuous._base_continuous import BaseContinuous
class LocScale(BaseContinuous):
    """
    Probability distributions belonging to the location-scale family.

    Parameters
    ----------
    loc : float, optional, default: 0.0
        Location parameter of the distribution.
    scale : float, optional, default: 1.0
        Scale parameter of the distribution; must be strictly positive.
    **kwargs : `base.BaseDistribution` properties.

    Raises
    ------
    ValueError
        If `scale` is a non-positive number.
    """

    def __init__(self, loc: float = 0.0, scale: float = 1.0, **kwargs) -> None:
        """Initialize self.  See help(type(self)) for accurate signature."""
        super().__init__(**kwargs)
        self.loc = loc
        self.scale = scale

    @property
    def loc(self) -> float:
        """Location parameter of the distribution."""
        return self.__loc

    @loc.setter
    def loc(self, loc: float = 0.0) -> None:
        """Set the location parameter (no validation required)."""
        self.__loc = loc

    @property
    def scale(self) -> float:
        """Scale parameter of the distribution."""
        return self.__scale

    @scale.setter
    def scale(self, scale: float = 1.0) -> None:
        """Set the scale parameter, rejecting non-positive values."""
        if scale <= 0:
            raise ValueError('`scale` value must be positive.')
        self.__scale = scale

    def get_standard_mean(self, mean: float):
        """Map `mean` onto the family's standard (loc=0, scale=1) member."""
        centred = mean - self.loc
        return centred / self.scale

    def get_standard_variance(self, variance: float):
        """Map `variance` onto the family's standard (loc=0, scale=1) member."""
        return variance / self.scale ** 2
| 28.025641 | 79 | 0.63312 |
from method_of_moments.continuous._base_continuous import BaseContinuous
class LocScale(BaseContinuous):
    """
    Probability distributions from the location-scale family.

    `loc` shifts and `scale` (strictly positive) stretches the standard
    member of the family.
    """
    def __init__(self, loc: float = 0.0, scale: float = 1.0, **kwargs) -> None:
        super().__init__(**kwargs)
        self.loc = loc
        self.scale = scale
    @property
    def loc(self) -> float:
        """Location parameter of the distribution."""
        return self.__loc
    @loc.setter
    def loc(self, loc: float = 0.0) -> None:
        self.__loc = loc
    @property
    def scale(self) -> float:
        """Scale parameter of the distribution."""
        return self.__scale
    @scale.setter
    def scale(self, scale: float = 1.0) -> None:
        # A non-positive scale would not define a valid distribution.
        if scale <= 0:
            raise ValueError('`scale` value must be positive.')
        self.__scale = scale
    def get_standard_mean(self, mean: float):
        """Return `mean` mapped onto the standard (loc=0, scale=1) member."""
        return (mean - self.loc) / self.scale
    def get_standard_variance(self, variance: float):
        """Return `variance` mapped onto the standard member."""
        return variance / self.scale ** 2
| true | true |
f72fdcd3421f334ce1bfe3c860ab1e55aab23f82 | 1,226 | py | Python | test/test_jcampdx.py | MIRCen/brukerapi-python | 5455800895924c69bf839fa621fa7a06d343b4ff | [
"MIT"
] | 7 | 2020-06-30T16:09:20.000Z | 2022-03-09T13:27:55.000Z | test/test_jcampdx.py | MIRCen/brukerapi-python | 5455800895924c69bf839fa621fa7a06d343b4ff | [
"MIT"
] | 2 | 2020-09-06T19:29:36.000Z | 2021-03-15T08:03:46.000Z | test/test_jcampdx.py | MIRCen/brukerapi-python | 5455800895924c69bf839fa621fa7a06d343b4ff | [
"MIT"
] | 1 | 2022-01-20T09:43:45.000Z | 2022-01-20T09:43:45.000Z | from brukerapi.jcampdx import JCAMPDX
import numpy as np
from pathlib import Path
import pytest
@pytest.mark.skip(reason="in progress")
def test_jcampdx(test_jcampdx_data):
    """Compare every parameter parsed by JCAMPDX against its reference entry."""
    dataset, folder = test_jcampdx_data[0], test_jcampdx_data[1]
    jcamp = JCAMPDX(Path(folder) / dataset['path'])
    for key, ref in dataset['parameters'].items():
        parameter = jcamp.get_parameter(key)
        size_found = parameter.size
        value_found = parameter.value
        value_expected = ref['value']
        size_expected = ref['size']
        type_expected = ref['type']
        # SIZE: normalise the reference to the parser's representation.
        if size_expected == 'None':
            size_expected = None
        if isinstance(size_expected, list):
            size_expected = tuple(size_expected)
        elif isinstance(size_expected, int):
            size_expected = (size_expected,)
        assert size_expected == size_found
        # TYPE: the reference stores the class name as a string.
        assert type_expected == value_found.__class__.__name__
        # VALUE: arrays, lists and scalars each compare differently.
        if isinstance(value_found, np.ndarray):
            assert np.array_equal(np.array(value_expected), value_found)
        elif isinstance(value_found, list):
            assert value_found == value_expected
        else:
            assert value_expected == value_found
| 29.902439 | 74 | 0.626427 | from brukerapi.jcampdx import JCAMPDX
import numpy as np
from pathlib import Path
import pytest
@pytest.mark.skip(reason="in progress")
def test_jcampdx(test_jcampdx_data):
    """Check each parsed JCAMP-DX parameter against its reference entry."""
    j = JCAMPDX(Path(test_jcampdx_data[1]) / test_jcampdx_data[0]['path'])
    for key, ref in test_jcampdx_data[0]['parameters'].items():
        parameter_test = j.get_parameter(key)
        size_test= parameter_test.size
        value_test= parameter_test.value
        type_test = value_test.__class__
        value_ref = ref['value']
        size_ref = ref['size']
        type_ref = ref['type']
        # test SIZE: normalise the reference size to the parser's tuple form
        if size_ref == 'None':
            size_ref = None
        if isinstance(size_ref, list):
            size_ref = tuple(size_ref)
        elif isinstance(size_ref, int):
            size_ref = (size_ref,)
        assert size_ref == size_test
        # test TYPE: compared by class name
        assert type_ref == type_test.__name__
        # test VALUE: arrays, lists and scalars compare differently
        if isinstance(value_test, np.ndarray):
            value_ref = np.array(value_ref)
            assert np.array_equal(value_ref, value_test)
        elif isinstance(value_test, list):
            assert value_test == value_ref
        else:
            assert value_ref == value_test
| true | true |
f72fdd762dd6a686c705479e1165f5735db40a61 | 1,055 | py | Python | src/lib/telegram/parsemode.py | thonkify/thonkify | 2cb4493d796746cb46c8519a100ef3ef128a761a | [
"MIT"
] | 17 | 2017-08-04T15:41:05.000Z | 2020-10-16T18:02:41.000Z | src/lib/telegram/parsemode.py | thonkify/thonkify | 2cb4493d796746cb46c8519a100ef3ef128a761a | [
"MIT"
] | 3 | 2017-08-04T23:37:37.000Z | 2017-08-04T23:38:34.000Z | src/lib/telegram/parsemode.py | thonkify/thonkify | 2cb4493d796746cb46c8519a100ef3ef128a761a | [
"MIT"
] | 3 | 2017-12-07T16:30:59.000Z | 2019-06-16T02:48:28.000Z | #!/usr/bin/env python
# pylint: disable=R0903
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram
Message Parse Modes."""
class ParseMode(object):
    """Constants for the text-formatting modes a Telegram message may use."""

    # Values are sent verbatim in the Bot API ``parse_mode`` field.
    MARKDOWN = 'Markdown'
    HTML = 'HTML'
| 36.37931 | 71 | 0.747867 |
class ParseMode(object):
    """Telegram message parse-mode constants (Bot API ``parse_mode`` values)."""
    MARKDOWN = 'Markdown'
    HTML = 'HTML'
| true | true |
f72fdd985d4e4c0bdc2d66e73fde136c53658738 | 3,108 | py | Python | openaerostruct/structures/section_properties_tube.py | lkelvinm/OpenAeroStruct | 395075d28783c1b99b4ab25ddf034000caf9cd0d | [
"Apache-2.0"
] | null | null | null | openaerostruct/structures/section_properties_tube.py | lkelvinm/OpenAeroStruct | 395075d28783c1b99b4ab25ddf034000caf9cd0d | [
"Apache-2.0"
] | null | null | null | openaerostruct/structures/section_properties_tube.py | lkelvinm/OpenAeroStruct | 395075d28783c1b99b4ab25ddf034000caf9cd0d | [
"Apache-2.0"
] | null | null | null | from __future__ import division, print_function
import numpy as np
from openmdao.api import ExplicitComponent
class SectionPropertiesTube(ExplicitComponent):
    """
    Geometric section properties of a hollow circular (tube) FEM element.

    The wall thickness is taken off the inside of the section, i.e.
    `radius` is the OUTER radius of the tube.

    parameters
    ----------
    radius : numpy array
        Outer radius of each FEM element.
    thickness : numpy array
        Tube wall thickness of each FEM element.

    Returns
    -------
    A : numpy array
        Cross-sectional area of each FEM element.
    Iy : numpy array
        Area moment of inertia around the y-axis of each FEM element.
    Iz : numpy array
        Area moment of inertia around the z-axis of each FEM element.
    J : numpy array
        Polar moment of inertia of each FEM element.
    """

    def initialize(self):
        self.options.declare('surface', types=dict)

    def setup(self):
        surface = self.options['surface']
        self.surface = surface
        self.ny = surface['num_y']
        n_elem = self.ny - 1

        self.add_input('radius', val=np.ones(n_elem), units='m')
        self.add_input('thickness', val=np.ones(n_elem) * .1, units='m')

        self.add_output('A', val=np.zeros(n_elem), units='m**2')
        self.add_output('Iy', val=np.zeros(n_elem), units='m**4')
        self.add_output('Iz', val=np.zeros(n_elem), units='m**4')
        self.add_output('J', val=np.zeros(n_elem), units='m**4')

        # The Jacobian is diagonal: element i depends only on its own inputs.
        diag = np.arange(n_elem)
        self.declare_partials('*', '*', rows=diag, cols=diag)
        self.set_check_partial_options(wrt='*', method='cs')

    def compute(self, inputs, outputs):
        pi = np.pi
        # Thickness is added to the interior: inner = outer - thickness.
        outer = inputs['radius']
        inner = outer - inputs['thickness']

        outputs['A'] = pi * (outer**2 - inner**2)
        outputs['Iy'] = pi * (outer**4 - inner**4) / 4.
        outputs['Iz'] = pi * (outer**4 - inner**4) / 4.
        outputs['J'] = pi * (outer**4 - inner**4) / 2.

    def compute_partials(self, inputs, partials):
        pi = np.pi
        # .real strips any imaginary part left over from complex-step checks.
        outer = inputs['radius'].real
        wall = inputs['thickness'].real
        inner = outer - wall

        # Derivatives of the two radii w.r.t. the design variables.
        dinner_dr = 1.
        douter_dr = 1.
        dinner_dt = -1.
        douter_dt = 0.

        inner_cubed = inner**3
        outer_cubed = outer**3

        partials['A', 'radius'] = 2 * pi * (outer * douter_dr - inner * dinner_dr)
        partials['A', 'thickness'] = 2 * pi * (outer * douter_dt - inner * dinner_dt)
        partials['Iy', 'radius'] = pi * (outer_cubed * douter_dr - inner_cubed * dinner_dr)
        partials['Iy', 'thickness'] = pi * (outer_cubed * douter_dt - inner_cubed * dinner_dt)
        partials['Iz', 'radius'] = pi * (outer_cubed * douter_dr - inner_cubed * dinner_dr)
        partials['Iz', 'thickness'] = pi * (outer_cubed * douter_dt - inner_cubed * dinner_dt)
        partials['J', 'radius'] = 2 * pi * (outer_cubed * douter_dr - inner_cubed * dinner_dr)
        partials['J', 'thickness'] = 2 * pi * (outer_cubed * douter_dt - inner_cubed * dinner_dt)
| 35.724138 | 80 | 0.568855 | from __future__ import division, print_function
import numpy as np
from openmdao.api import ExplicitComponent
class SectionPropertiesTube(ExplicitComponent):
    """
    Compute geometric properties for a tube element.

    Thickness is added to the interior of the element, so `radius` is the
    outer radius.  Outputs: A (area), Iy/Iz (area moments), J (polar moment).
    """
    def initialize(self):
        self.options.declare('surface', types=dict)
    def setup(self):
        self.surface = surface = self.options['surface']
        self.ny = surface['num_y']
        self.add_input('radius', val=np.ones((self.ny - 1)), units='m')
        self.add_input('thickness', val=np.ones((self.ny - 1)) * .1, units='m')
        self.add_output('A', val=np.zeros((self.ny - 1)), units='m**2')
        self.add_output('Iy', val=np.zeros((self.ny - 1)), units='m**4')
        self.add_output('Iz', val=np.zeros((self.ny - 1)), units='m**4')
        self.add_output('J', val=np.zeros((self.ny - 1)), units='m**4')
        # Diagonal Jacobian: each element depends only on its own inputs.
        a = np.arange((self.ny - 1))
        self.declare_partials('*', '*', rows=a, cols=a)
        self.set_check_partial_options(wrt='*', method='cs')
    def compute(self, inputs, outputs):
        pi = np.pi
        # Inner radius: thickness is taken off the inside of the section.
        r1 = inputs['radius'] - inputs['thickness']
        r2 = inputs['radius']
        outputs['A'] = pi * (r2**2 - r1**2)
        outputs['Iy'] = pi * (r2**4 - r1**4) / 4.
        outputs['Iz'] = pi * (r2**4 - r1**4) / 4.
        outputs['J'] = pi * (r2**4 - r1**4) / 2.
    def compute_partials(self, inputs, partials):
        pi = np.pi
        # .real strips any imaginary part left from complex-step checking.
        radius = inputs['radius'].real
        t = inputs['thickness'].real
        r1 = radius - t
        r2 = radius
        dr1_dr = 1.
        dr2_dr = 1.
        dr1_dt = -1.
        dr2_dt = 0.
        r1_3 = r1**3
        r2_3 = r2**3
        partials['A', 'radius'] = 2 * pi * (r2 * dr2_dr - r1 * dr1_dr)
        partials['A', 'thickness'] = 2 * pi * (r2 * dr2_dt - r1 * dr1_dt)
        partials['Iy', 'radius'] = pi * (r2_3 * dr2_dr - r1_3 * dr1_dr)
        partials['Iy', 'thickness'] = pi * (r2_3 * dr2_dt - r1_3 * dr1_dt)
        partials['Iz', 'radius'] = pi * (r2_3 * dr2_dr - r1_3 * dr1_dr)
        partials['Iz', 'thickness'] = pi * (r2_3 * dr2_dt - r1_3 * dr1_dt)
        partials['J', 'radius'] = 2 * pi * (r2_3 * dr2_dr - r1_3 * dr1_dr)
        partials['J', 'thickness'] = 2 * pi * (r2_3 * dr2_dt - r1_3 * dr1_dt)
| true | true |
f72fdda2808488fef61058f47c4ebf00428e8bf0 | 9,861 | py | Python | devel/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py | MultiRobotUPenn/groundstation_ws_vio_swarm | 60e01af6bf32bafb5bc31626b055436278dc8311 | [
"MIT"
] | 1 | 2020-03-10T06:32:51.000Z | 2020-03-10T06:32:51.000Z | install/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py | MultiRobotUPenn/groundstation_ws_vio_swarm | 60e01af6bf32bafb5bc31626b055436278dc8311 | [
"MIT"
] | null | null | null | install/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py | MultiRobotUPenn/groundstation_ws_vio_swarm | 60e01af6bf32bafb5bc31626b055436278dc8311 | [
"MIT"
] | 1 | 2018-11-07T03:37:23.000Z | 2018-11-07T03:37:23.000Z | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from mav_manager/GoalTimedRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class GoalTimedRequest(genpy.Message):
  _md5sum = "3c9a1ea281c62219122f22aa2b508b97"
  _type = "mav_manager/GoalTimedRequest"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """float32[4] goal
duration duration
time t_start
"""
  __slots__ = ['goal','duration','t_start']
  _slot_types = ['float32[4]','duration','time']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       goal,duration,t_start
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GoalTimedRequest, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.goal is None:
        self.goal = [0.] * 4
      if self.duration is None:
        self.duration = genpy.Duration()
      if self.t_start is None:
        self.t_start = genpy.Time()
    else:
      self.goal = [0.] * 4
      self.duration = genpy.Duration()
      self.t_start = genpy.Time()
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  # Wire layout: 4 x float32 goal, then duration (2 x int32) and
  # t_start (2 x uint32) — 32 bytes total.
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      buff.write(_get_struct_4f().pack(*self.goal))
      _x = self
      buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      if self.duration is None:
        self.duration = genpy.Duration()
      if self.t_start is None:
        self.t_start = genpy.Time()
      end = 0
      start = end
      end += 16
      self.goal = _get_struct_4f().unpack(str[start:end])
      _x = self
      start = end
      end += 16
      (_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
      # canon() normalises the secs/nsecs pair after raw unpacking.
      self.duration.canon()
      self.t_start.canon()
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      buff.write(self.goal.tostring())
      _x = self
      buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      if self.duration is None:
        self.duration = genpy.Duration()
      if self.t_start is None:
        self.t_start = genpy.Time()
      end = 0
      start = end
      end += 16
      self.goal = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=4)
      _x = self
      start = end
      end += 16
      (_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
      # canon() normalises the secs/nsecs pair after raw unpacking.
      self.duration.canon()
      self.t_start.canon()
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Shared pre-compiled Struct for the uint32 length prefix used by strings.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the cached uint32 Struct used for length prefixes."""
    global _struct_I
    return _struct_I
_struct_4f = None
def _get_struct_4f():
global _struct_4f
if _struct_4f is None:
_struct_4f = struct.Struct("<4f")
return _struct_4f
_struct_2i2I = None
def _get_struct_2i2I():
global _struct_2i2I
if _struct_2i2I is None:
_struct_2i2I = struct.Struct("<2i2I")
return _struct_2i2I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from mav_manager/GoalTimedResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GoalTimedResponse(genpy.Message):
  _md5sum = "937c9679a518e3a18d831e57125ea522"
  _type = "mav_manager/GoalTimedResponse"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """bool success
string message
"""
  __slots__ = ['success','message']
  _slot_types = ['bool','string']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       success,message
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GoalTimedResponse, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.success is None:
        self.success = False
      if self.message is None:
        self.message = ''
    else:
      self.success = False
      self.message = ''
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  # Wire layout: 1 x uint8 bool, then a uint32 length-prefixed UTF-8 string.
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      buff.write(_get_struct_B().pack(self.success))
      _x = self.message
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      start = end
      end += 1
      (self.success,) = _get_struct_B().unpack(str[start:end])
      self.success = bool(self.success)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.message = str[start:end].decode('utf-8')
      else:
        self.message = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      buff.write(_get_struct_B().pack(self.success))
      _x = self.message
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      start = end
      end += 1
      (self.success,) = _get_struct_B().unpack(str[start:end])
      self.success = bool(self.success)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.message = str[start:end].decode('utf-8')
      else:
        self.message = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Shared pre-compiled Struct for the uint32 length prefix used by strings.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the cached uint32 Struct used for length prefixes."""
    global _struct_I
    return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class GoalTimed(object):
  """Service descriptor pairing GoalTimedRequest with GoalTimedResponse."""
  _type          = 'mav_manager/GoalTimed'
  _md5sum = '3200a97d30222d1d03961acacb87f306'
  _request_class  = GoalTimedRequest
  _response_class = GoalTimedResponse
| 33.540816 | 145 | 0.653179 |
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class GoalTimedRequest(genpy.Message):
  """Autogenerated genpy request message: goal (float32[4]), duration, t_start."""
  _md5sum = "3c9a1ea281c62219122f22aa2b508b97"
  _type = "mav_manager/GoalTimedRequest"
  _has_header = False
  _full_text = """float32[4] goal
duration duration
time t_start
"""
  __slots__ = ['goal','duration','t_start']
  _slot_types = ['float32[4]','duration','time']
  def __init__(self, *args, **kwds):
    """Assign fields from args/kwds; missing fields get genpy defaults."""
    if args or kwds:
      super(GoalTimedRequest, self).__init__(*args, **kwds)
      if self.goal is None:
        self.goal = [0.] * 4
      if self.duration is None:
        self.duration = genpy.Duration()
      if self.t_start is None:
        self.t_start = genpy.Time()
    else:
      self.goal = [0.] * 4
      self.duration = genpy.Duration()
      self.t_start = genpy.Time()
  def _get_types(self):
    """internal API method"""
    return self._slot_types
  # Wire layout: 4 x float32 goal, then duration (2 x int32) and
  # t_start (2 x uint32) — 32 bytes total.
  def serialize(self, buff):
    """Serialize this message into `buff`."""
    try:
      buff.write(_get_struct_4f().pack(*self.goal))
      _x = self
      buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """Unpack the serialized bytes in `str` into this instance."""
    try:
      if self.duration is None:
        self.duration = genpy.Duration()
      if self.t_start is None:
        self.t_start = genpy.Time()
      end = 0
      start = end
      end += 16
      self.goal = _get_struct_4f().unpack(str[start:end])
      _x = self
      start = end
      end += 16
      (_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
      # canon() normalises the secs/nsecs pair after raw unpacking.
      self.duration.canon()
      self.t_start.canon()
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)
  def serialize_numpy(self, buff, numpy):
    """Serialize this message into `buff`, using numpy for array fields."""
    try:
      buff.write(self.goal.tostring())
      _x = self
      buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """Unpack `str` into this instance, using numpy for array fields."""
    try:
      if self.duration is None:
        self.duration = genpy.Duration()
      if self.t_start is None:
        self.t_start = genpy.Time()
      end = 0
      start = end
      end += 16
      self.goal = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=4)
      _x = self
      start = end
      end += 16
      (_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
      # canon() normalises the secs/nsecs pair after raw unpacking.
      self.duration.canon()
      self.t_start.canon()
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)
# Shared pre-compiled Struct for the uint32 length prefix used by strings.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the cached uint32 Struct used for length prefixes."""
    global _struct_I
    return _struct_I
_struct_4f = None
def _get_struct_4f():
global _struct_4f
if _struct_4f is None:
_struct_4f = struct.Struct("<4f")
return _struct_4f
_struct_2i2I = None
def _get_struct_2i2I():
global _struct_2i2I
if _struct_2i2I is None:
_struct_2i2I = struct.Struct("<2i2I")
return _struct_2i2I
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GoalTimedResponse(genpy.Message):
  """Autogenerated genpy response message: success (bool), message (string)."""
  _md5sum = "937c9679a518e3a18d831e57125ea522"
  _type = "mav_manager/GoalTimedResponse"
  _has_header = False
  _full_text = """bool success
string message
"""
  __slots__ = ['success','message']
  _slot_types = ['bool','string']
  def __init__(self, *args, **kwds):
    """Assign fields from args/kwds; missing fields get genpy defaults."""
    if args or kwds:
      super(GoalTimedResponse, self).__init__(*args, **kwds)
      if self.success is None:
        self.success = False
      if self.message is None:
        self.message = ''
    else:
      self.success = False
      self.message = ''
  def _get_types(self):
    """internal API method"""
    return self._slot_types
  # Wire layout: 1 x uint8 bool, then a uint32 length-prefixed UTF-8 string.
  def serialize(self, buff):
    """Serialize this message into `buff`."""
    try:
      buff.write(_get_struct_B().pack(self.success))
      _x = self.message
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """Unpack the serialized bytes in `str` into this instance."""
    try:
      end = 0
      start = end
      end += 1
      (self.success,) = _get_struct_B().unpack(str[start:end])
      self.success = bool(self.success)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.message = str[start:end].decode('utf-8')
      else:
        self.message = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)
  def serialize_numpy(self, buff, numpy):
    """Serialize this message into `buff`, using numpy for array fields."""
    try:
      buff.write(_get_struct_B().pack(self.success))
      _x = self.message
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """Unpack `str` into this instance, using numpy for array fields."""
    try:
      end = 0
      start = end
      end += 1
      (self.success,) = _get_struct_B().unpack(str[start:end])
      self.success = bool(self.success)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.message = str[start:end].decode('utf-8')
      else:
        self.message = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class GoalTimed(object):
_type = 'mav_manager/GoalTimed'
_md5sum = '3200a97d30222d1d03961acacb87f306'
_request_class = GoalTimedRequest
_response_class = GoalTimedResponse
| true | true |
f72fde34553d0101da278cb9f85832174a12acbb | 1,294 | py | Python | src/image-gallery/azext_image_gallery/_client_factory.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/image-gallery/azext_image_gallery/_client_factory.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/image-gallery/azext_image_gallery/_client_factory.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
def _compute_client_factory(cli_ctx):
    """Build a ComputeManagementClient from the vendored SDK, bound to *cli_ctx*.

    Imports are local so the CLI only pays the SDK import cost when a
    compute command is actually invoked.
    """
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from .vendored_sdks.azure_mgmt_compute._compute_management_client import ComputeManagementClient
    return get_mgmt_service_client(cli_ctx, ComputeManagementClient)
def cf_galleries(cli_ctx, _):
    """Client factory: Shared Image Gallery operations."""
    return _compute_client_factory(cli_ctx).galleries
def cf_gallery_images(cli_ctx, _):
    """Client factory: gallery image definition operations."""
    return _compute_client_factory(cli_ctx).gallery_images
def cf_community_gallery(cli_ctx, *_):
    """Client factory: community gallery operations."""
    return _compute_client_factory(cli_ctx).community_galleries
def cf_community_gallery_image(cli_ctx, *_):
    """Client factory: community gallery image operations."""
    return _compute_client_factory(cli_ctx).community_gallery_images
def cf_community_gallery_image_version(cli_ctx, *_):
    """Client factory: community gallery image version operations."""
    return _compute_client_factory(cli_ctx).community_gallery_image_versions
def cf_community_gallery_sharing_profile(cli_ctx, *_):
    """Client factory: gallery sharing profile operations."""
    return _compute_client_factory(cli_ctx).gallery_sharing_profile
| 36.971429 | 100 | 0.710974 |
def _compute_client_factory(cli_ctx):
from azure.cli.core.commands.client_factory import get_mgmt_service_client
from .vendored_sdks.azure_mgmt_compute._compute_management_client import ComputeManagementClient
return get_mgmt_service_client(cli_ctx, ComputeManagementClient)
def cf_galleries(cli_ctx, _):
return _compute_client_factory(cli_ctx).galleries
def cf_gallery_images(cli_ctx, _):
return _compute_client_factory(cli_ctx).gallery_images
def cf_community_gallery(cli_ctx, *_):
return _compute_client_factory(cli_ctx).community_galleries
def cf_community_gallery_image(cli_ctx, *_):
return _compute_client_factory(cli_ctx).community_gallery_images
def cf_community_gallery_image_version(cli_ctx, *_):
return _compute_client_factory(cli_ctx).community_gallery_image_versions
def cf_community_gallery_sharing_profile(cli_ctx, *_):
return _compute_client_factory(cli_ctx).gallery_sharing_profile
| true | true |
f72fde9be8445c641564cc9689aca34ffff96645 | 5,280 | py | Python | mmdet/core/hook/ema.py | mrzhuzhe/mmdetection | c04ca2c2a65500bc248a5d2ab6ace5b15f00064d | [
"Apache-2.0"
] | null | null | null | mmdet/core/hook/ema.py | mrzhuzhe/mmdetection | c04ca2c2a65500bc248a5d2ab6ace5b15f00064d | [
"Apache-2.0"
] | null | null | null | mmdet/core/hook/ema.py | mrzhuzhe/mmdetection | c04ca2c2a65500bc248a5d2ab6ace5b15f00064d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import math
from mmcv.parallel import is_module_wrapper
from mmcv.runner.hooks import HOOKS, Hook
class BaseEMAHook(Hook):
    """Exponential Moving Average Hook.

    Use Exponential Moving Average on all parameters of model in training
    process. All parameters have an EMA backup, which is updated by the
    formula below. EMAHook takes priority over EvalHook and CheckpointHook.
    Note: the original model parameters are actually saved in the ema
    buffers after training (see ``_swap_ema_parameters``).

    Args:
        momentum (float): The momentum used for updating ema parameter.
            Ema's parameter are updated with the formula:
            `ema_param = (1-momentum) * ema_param + momentum * cur_param`.
            Defaults to 0.0002.
        skip_buffers (bool): Whether to skip the model buffers, such as
            batchnorm running stats (running_mean, running_var), it does not
            perform the ema operation. Default to False.
        interval (int): Update ema parameter every interval iteration.
            Defaults to 1.
        resume_from (str, optional): The checkpoint path. Defaults to None.
        momentum_fun (func, optional): The function to change momentum
            during early iteration (also warmup) to help early training.
            It uses `momentum` as a constant. Defaults to None.
    """
    def __init__(self,
                 momentum=0.0002,
                 interval=1,
                 skip_buffers=False,
                 resume_from=None,
                 momentum_fun=None):
        assert 0 < momentum < 1
        self.momentum = momentum
        self.skip_buffers = skip_buffers
        self.interval = interval
        self.checkpoint = resume_from
        self.momentum_fun = momentum_fun
    def before_run(self, runner):
        """To resume model with its ema parameters more friendly.

        Register ema parameter as ``named_buffer`` to model so the EMA
        weights travel with ordinary checkpoints.
        """
        model = runner.model
        if is_module_wrapper(model):
            # Unwrap (Distributed)DataParallel to reach the real module.
            model = model.module
        self.param_ema_buffer = {}
        if self.skip_buffers:
            self.model_parameters = dict(model.named_parameters())
        else:
            # state_dict() also includes buffers (e.g. BN running stats).
            self.model_parameters = model.state_dict()
        for name, value in self.model_parameters.items():
            # "." is not allowed in module's buffer name
            buffer_name = f"ema_{name.replace('.', '_')}"
            self.param_ema_buffer[name] = buffer_name
            model.register_buffer(buffer_name, value.data.clone())
        self.model_buffers = dict(model.named_buffers())
        if self.checkpoint is not None:
            # Resume AFTER registering buffers so the checkpoint's ema_*
            # entries have somewhere to load into.
            runner.resume(self.checkpoint)
    def get_momentum(self, runner):
        # Warmup schedule if provided, otherwise the constant momentum.
        return self.momentum_fun(runner.iter) if self.momentum_fun else \
            self.momentum
    def after_train_iter(self, runner):
        """Update ema parameter every self.interval iterations."""
        if (runner.iter + 1) % self.interval != 0:
            return
        momentum = self.get_momentum(runner)
        for name, parameter in self.model_parameters.items():
            # exclude num_tracking (integer buffers cannot be averaged)
            if parameter.dtype.is_floating_point:
                buffer_name = self.param_ema_buffer[name]
                buffer_parameter = self.model_buffers[buffer_name]
                # ema = (1 - m) * ema + m * param, in place.
                buffer_parameter.mul_(1 - momentum).add_(
                    parameter.data, alpha=momentum)
    def after_train_epoch(self, runner):
        """We load parameter values from ema backup to model before the
        EvalHook."""
        self._swap_ema_parameters()
    def before_train_epoch(self, runner):
        """We recover model's parameter from ema backup after last epoch's
        EvalHook."""
        self._swap_ema_parameters()
    def _swap_ema_parameters(self):
        """Swap the parameter of model with parameter in ema_buffer."""
        for name, value in self.model_parameters.items():
            temp = value.data.clone()
            ema_buffer = self.model_buffers[self.param_ema_buffer[name]]
            value.data.copy_(ema_buffer.data)
            ema_buffer.data.copy_(temp)
@HOOKS.register_module()
class ExpMomentumEMAHook(BaseEMAHook):
    """EMAHook whose momentum follows an exponential warmup schedule.

    The effective momentum starts near 1.0 and decays towards the
    configured base ``momentum`` over roughly ``total_iter`` iterations.

    Args:
        total_iter (int): The total number of iterations of EMA momentum.
            Defaults to 2000.
    """
    def __init__(self, total_iter=2000, **kwargs):
        super(ExpMomentumEMAHook, self).__init__(**kwargs)

        def _exp_momentum(cur_iter):
            # Interpolate between 1.0 (early) and self.momentum (late).
            decay = math.exp(-(1 + cur_iter) / total_iter)
            return self.momentum + (1 - self.momentum) * decay

        self.momentum_fun = _exp_momentum
@HOOKS.register_module()
class LinearMomentumEMAHook(BaseEMAHook):
    """EMAHook whose momentum follows a linear warmup schedule.

    Args:
        warm_up (int): During first warm_up steps, we may use smaller decay
            to update ema parameters more slowly. Defaults to 100.
    """
    def __init__(self, warm_up=100, **kwargs):
        super(LinearMomentumEMAHook, self).__init__(**kwargs)

        def _linear_momentum(cur_iter):
            # Use the smaller of the constant cap momentum**interval and
            # the linear warmup term (1 + iter) / (warm_up + iter).
            cap = self.momentum ** self.interval
            warmup_value = (1 + cur_iter) / (warm_up + cur_iter)
            return min(cap, warmup_value)

        self.momentum_fun = _linear_momentum
| 40.305344 | 79 | 0.621212 |
import math
from mmcv.parallel import is_module_wrapper
from mmcv.runner.hooks import HOOKS, Hook
class BaseEMAHook(Hook):
def __init__(self,
momentum=0.0002,
interval=1,
skip_buffers=False,
resume_from=None,
momentum_fun=None):
assert 0 < momentum < 1
self.momentum = momentum
self.skip_buffers = skip_buffers
self.interval = interval
self.checkpoint = resume_from
self.momentum_fun = momentum_fun
def before_run(self, runner):
model = runner.model
if is_module_wrapper(model):
model = model.module
self.param_ema_buffer = {}
if self.skip_buffers:
self.model_parameters = dict(model.named_parameters())
else:
self.model_parameters = model.state_dict()
for name, value in self.model_parameters.items():
buffer_name = f"ema_{name.replace('.', '_')}"
self.param_ema_buffer[name] = buffer_name
model.register_buffer(buffer_name, value.data.clone())
self.model_buffers = dict(model.named_buffers())
if self.checkpoint is not None:
runner.resume(self.checkpoint)
def get_momentum(self, runner):
return self.momentum_fun(runner.iter) if self.momentum_fun else \
self.momentum
def after_train_iter(self, runner):
if (runner.iter + 1) % self.interval != 0:
return
momentum = self.get_momentum(runner)
for name, parameter in self.model_parameters.items():
# exclude num_tracking
if parameter.dtype.is_floating_point:
buffer_name = self.param_ema_buffer[name]
buffer_parameter = self.model_buffers[buffer_name]
buffer_parameter.mul_(1 - momentum).add_(
parameter.data, alpha=momentum)
def after_train_epoch(self, runner):
self._swap_ema_parameters()
def before_train_epoch(self, runner):
self._swap_ema_parameters()
def _swap_ema_parameters(self):
for name, value in self.model_parameters.items():
temp = value.data.clone()
ema_buffer = self.model_buffers[self.param_ema_buffer[name]]
value.data.copy_(ema_buffer.data)
ema_buffer.data.copy_(temp)
@HOOKS.register_module()
class ExpMomentumEMAHook(BaseEMAHook):
def __init__(self, total_iter=2000, **kwargs):
super(ExpMomentumEMAHook, self).__init__(**kwargs)
self.momentum_fun = lambda x: (1 - self.momentum) * math.exp(-(
1 + x) / total_iter) + self.momentum
@HOOKS.register_module()
class LinearMomentumEMAHook(BaseEMAHook):
def __init__(self, warm_up=100, **kwargs):
super(LinearMomentumEMAHook, self).__init__(**kwargs)
self.momentum_fun = lambda x: min(self.momentum**self.interval,
(1 + x) / (warm_up + x))
| true | true |
f72fdecee874f57c54aafbb15866dc4f007451be | 1,697 | py | Python | **PyBank**/main.py | cathchristabel/Python-Challenge | f8a56210c15785626c693101f12173c9b55f3c9d | [
"ADSL"
] | null | null | null | **PyBank**/main.py | cathchristabel/Python-Challenge | f8a56210c15785626c693101f12173c9b55f3c9d | [
"ADSL"
] | null | null | null | **PyBank**/main.py | cathchristabel/Python-Challenge | f8a56210c15785626c693101f12173c9b55f3c9d | [
"ADSL"
] | null | null | null | import os
import csv
filepath = os.path.join('..','**PyBank**','Resources','budget_data.csv')
output_path = os.path.join('..','**PyBank**','financial_analysis.txt')
total_months = 0
total_net = 0
net_change_list = []
month_of_change = []
greatest_increase = ["", 0]
greatest_decrease = ["", 9999999999999]
with open (filepath, newline = '') as csvfile:
csvreader = csv.reader(csvfile, delimiter = ',')
header = next(csvreader)
first_row = next(csvreader)
total_months = total_months + 1
total_net = total_net + int(first_row[1])
prev_net = int(first_row[1])
for row in csvreader:
total_months = total_months + 1
total_net += int(row[1])
net_change = int(row[1]) - prev_net
prev_net = int(row[1])
net_change_list = net_change_list + [net_change]
month_of_change = month_of_change + [row[0]]
if net_change > greatest_increase[1]:
greatest_increase[0] = row[0]
greatest_increase[1] = net_change
if net_change < greatest_decrease[1]:
greatest_decrease[0] = row[0]
greatest_decrease[1] = net_change
average_change = sum(net_change_list) / len(net_change_list)
output = (f'Financial Analysis\n'
f'-------------------\n'
f'Total Months: {total_months}\n'
f'Total: ${total_net}\n'
f'Average Change: ${average_change:.2f}\n'
f'Greatest Increase in Profits: {greatest_increase[0]} (${greatest_increase[1]})\n'
f'Greatest Decrease in Profits: {greatest_decrease[0]} (${greatest_decrease[1]})')
print(output)
with open(output_path, "w") as txt_file:
txt_file.write(output)
| 31.425926 | 93 | 0.6264 | import os
import csv
filepath = os.path.join('..','**PyBank**','Resources','budget_data.csv')
output_path = os.path.join('..','**PyBank**','financial_analysis.txt')
total_months = 0
total_net = 0
net_change_list = []
month_of_change = []
greatest_increase = ["", 0]
greatest_decrease = ["", 9999999999999]
with open (filepath, newline = '') as csvfile:
csvreader = csv.reader(csvfile, delimiter = ',')
header = next(csvreader)
first_row = next(csvreader)
total_months = total_months + 1
total_net = total_net + int(first_row[1])
prev_net = int(first_row[1])
for row in csvreader:
total_months = total_months + 1
total_net += int(row[1])
net_change = int(row[1]) - prev_net
prev_net = int(row[1])
net_change_list = net_change_list + [net_change]
month_of_change = month_of_change + [row[0]]
if net_change > greatest_increase[1]:
greatest_increase[0] = row[0]
greatest_increase[1] = net_change
if net_change < greatest_decrease[1]:
greatest_decrease[0] = row[0]
greatest_decrease[1] = net_change
average_change = sum(net_change_list) / len(net_change_list)
output = (f'Financial Analysis\n'
f'-------------------\n'
f'Total Months: {total_months}\n'
f'Total: ${total_net}\n'
f'Average Change: ${average_change:.2f}\n'
f'Greatest Increase in Profits: {greatest_increase[0]} (${greatest_increase[1]})\n'
f'Greatest Decrease in Profits: {greatest_decrease[0]} (${greatest_decrease[1]})')
print(output)
with open(output_path, "w") as txt_file:
txt_file.write(output)
| true | true |
f72fe00487d7fd4d4a1b45f52317911518a2dda8 | 923 | py | Python | integration_tests/src/main/python/marks.py | wbo4958/spark-rapids | 2b18d10313b57aaf6541f40da571c98abcdbc908 | [
"Apache-2.0"
] | null | null | null | integration_tests/src/main/python/marks.py | wbo4958/spark-rapids | 2b18d10313b57aaf6541f40da571c98abcdbc908 | [
"Apache-2.0"
] | null | null | null | integration_tests/src/main/python/marks.py | wbo4958/spark-rapids | 2b18d10313b57aaf6541f40da571c98abcdbc908 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
# Shorthand aliases for the custom pytest marks used across the
# integration-test suite. NOTE(review): each mark's semantics are defined
# by the suite's conftest / pytest configuration -- confirm there before
# relying on a description.
allow_non_gpu = pytest.mark.allow_non_gpu
approximate_float = pytest.mark.approximate_float
ignore_order = pytest.mark.ignore_order
incompat = pytest.mark.incompat
limit = pytest.mark.limit
qarun = pytest.mark.qarun
cudf_udf = pytest.mark.cudf_udf
rapids_udf_example_native = pytest.mark.rapids_udf_example_native
| 36.92 | 74 | 0.789816 |
import pytest
allow_non_gpu = pytest.mark.allow_non_gpu
approximate_float = pytest.mark.approximate_float
ignore_order = pytest.mark.ignore_order
incompat = pytest.mark.incompat
limit = pytest.mark.limit
qarun = pytest.mark.qarun
cudf_udf = pytest.mark.cudf_udf
rapids_udf_example_native = pytest.mark.rapids_udf_example_native
| true | true |
f72fe031967fabab6e73cfb6ef6a29f19e93d585 | 473 | py | Python | src/maestral_cocoa/constants.py | SamSchott/maestral-cocoa | bb031b2df010ae84e058fadd3a1b10b19d23b762 | [
"MIT"
] | 8 | 2020-11-13T08:48:01.000Z | 2021-12-16T06:30:27.000Z | macOS/Xcode/Maestral/Maestral/app/maestral_cocoa/constants.py | SamSchott/maestral-cocoa | bb031b2df010ae84e058fadd3a1b10b19d23b762 | [
"MIT"
] | 4 | 2021-08-23T20:41:39.000Z | 2021-11-16T08:43:58.000Z | src/maestral_cocoa/constants.py | SamSchott/maestral-cocoa | bb031b2df010ae84e058fadd3a1b10b19d23b762 | [
"MIT"
] | 1 | 2021-11-09T07:14:44.000Z | 2021-11-09T07:14:44.000Z | # -*- coding: utf-8 -*-
# system imports
import sys
try:
    from importlib.metadata import metadata
except ImportError:
    # Backwards compatibility Python 3.7 and lower
    from importlib_metadata import metadata  # type: ignore
# Distribution metadata of the package that launched this process
# (assumes __main__ was started as part of a package -- __package__ set).
_app_module = sys.modules["__main__"].__package__
_md = metadata(_app_module)  # type: ignore
# detect if we have been built with briefcase or frozen with PyInstaller
FROZEN = "Briefcase-Version" in _md or getattr(sys, "frozen", False)
| 26.277778 | 72 | 0.744186 |
import sys
try:
from importlib.metadata import metadata
except ImportError:
from importlib_metadata import metadata
_app_module = sys.modules["__main__"].__package__
_md = metadata(_app_module)
FROZEN = "Briefcase-Version" in _md or getattr(sys, "frozen", False)
| true | true |
f72fe13e3737561fcf3652de947a89127a226c44 | 619 | py | Python | scraper_app/pipelines.py | brian-yang/pollen-scraper | 77e47d68bb1c6ca31e7b91550728fa59e9cb2d8a | [
"MIT"
] | null | null | null | scraper_app/pipelines.py | brian-yang/pollen-scraper | 77e47d68bb1c6ca31e7b91550728fa59e9cb2d8a | [
"MIT"
] | null | null | null | scraper_app/pipelines.py | brian-yang/pollen-scraper | 77e47d68bb1c6ca31e7b91550728fa59e9cb2d8a | [
"MIT"
] | null | null | null | from sqlalchemy.orm import sessionmaker
from models import Forecasts, db_connect, create_forecast_table
import logging
class PollenScraperPipeline(object):
def __init__(self):
engine = db_connect()
create_forecast_table(engine)
self.Session = sessionmaker(bind=engine)
def process_item(self, item, spider):
session = self.Session()
forecast = Forecasts(**item)
try:
session.add(forecast)
session.commit()
except:
session.rollback()
raise
finally:
session.close()
return item
| 24.76 | 63 | 0.620355 | from sqlalchemy.orm import sessionmaker
from models import Forecasts, db_connect, create_forecast_table
import logging
class PollenScraperPipeline(object):
def __init__(self):
engine = db_connect()
create_forecast_table(engine)
self.Session = sessionmaker(bind=engine)
def process_item(self, item, spider):
session = self.Session()
forecast = Forecasts(**item)
try:
session.add(forecast)
session.commit()
except:
session.rollback()
raise
finally:
session.close()
return item
| true | true |
f72fe1c22165e6e851d2bbb6a57c5a9a578e49f4 | 845 | py | Python | keras/engine/saving.py | itsraina/keras | 5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35 | [
"Apache-2.0"
] | null | null | null | keras/engine/saving.py | itsraina/keras | 5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35 | [
"Apache-2.0"
] | null | null | null | keras/engine/saving.py | itsraina/keras | 5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model saving utilities.
Everything has been moved to keras/saving/. This file will be deleted soon.
"""
from keras.saving import * # noqa: F401,F403
| 38.409091 | 80 | 0.685207 |
from keras.saving import *
| true | true |
f72fe23bdf252ab6cbb78597079dd21aae3c8959 | 719 | py | Python | ext_pylib/__init__.py | hbradleyiii/ext_pylib | 15a9b5a80db87b5f20e03ef6bfa015acf4bf8543 | [
"MIT"
] | 2 | 2015-12-18T14:33:23.000Z | 2015-12-22T11:48:53.000Z | ext_pylib/__init__.py | hbradleyiii/ext_pylib | 15a9b5a80db87b5f20e03ef6bfa015acf4bf8543 | [
"MIT"
] | null | null | null | ext_pylib/__init__.py | hbradleyiii/ext_pylib | 15a9b5a80db87b5f20e03ef6bfa015acf4bf8543 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# /'' |''\ | |
# \. | |../ | * |..
# / \/ T | \ / | | | \
# \.../\.| __ | \/ | | |../
# ###################/###############
# /
"""
ext_pylib
~~~~~~~~~
Extra python libraries for scaffolding server scripts.
"""
from __future__ import absolute_import
from . import domain
from . import files
from . import input # pylint: disable=redefined-builtin
from . import password
from . import terminal
from . import user
__title__ = 'ext_pylib'
__version__ = '0.2'
__author__ = 'Harold Bradley III'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-2016 Harold Bradley III'
# Soli Deo gloria. <><
| 20.542857 | 56 | 0.520167 | true | true | |
f72fe24bceb08d360b3e71ca50fe69638691a3cf | 4,782 | py | Python | src/train_set.py | caoyunhao/keras-speed-prediction | b1c87a012f8049050f124062e3cc24322e7d95b9 | [
"BSD-2-Clause"
] | null | null | null | src/train_set.py | caoyunhao/keras-speed-prediction | b1c87a012f8049050f124062e3cc24322e7d95b9 | [
"BSD-2-Clause"
] | null | null | null | src/train_set.py | caoyunhao/keras-speed-prediction | b1c87a012f8049050f124062e3cc24322e7d95b9 | [
"BSD-2-Clause"
] | null | null | null | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/4/6 10:55
# @Author : Yunhao Cao
# @File : train_set.py
import os
import re
import shutil
import tool
import config
__author__ = 'Yunhao Cao'
__all__ = [
'',
]
level_list = config.LV_LIST
classes = config.NUM_OF_LEVEL
validation_rate = config.VALIDATION_RATE
origin_data_dir = config.ORIGIN_DATA_DIR
processed_set_dir = config.PROCESSED_SET_DIR
trainset_dir = config.TRAINSET_DIR
validation_set_dir = config.VALIDATION_DIR
cut_shape = config.CUT_SHAPE_0
train_shape = config.TRAIN_SHAPE
image_width = config.IMAGE_WIDTH
image_height = config.IMAGE_HEIGHT
compare_path = tool.compare_path
def get_lv(v, levels=None) -> int:
    """Return the speed-level index for velocity *v*.

    The level is the index of the first threshold that ``abs(v)`` falls
    below, so thresholds are expected in ascending order.

    Args:
        v: velocity; sign is ignored.
        levels: optional ascending threshold sequence. Defaults to the
            module-level ``level_list`` (from config), which preserves
            the original call signature ``get_lv(v)``.

    Returns:
        Index of the matching level, or None when ``abs(v)`` is not below
        any threshold (callers treat None as an error).
    """
    if levels is None:
        levels = level_list
    speed = abs(v)
    for i, lv in enumerate(levels):
        if speed < lv:
            return i
    return None
def generate_sync_txt():
vf = 8 # forward velocity, i.e. parallel to earth-surface (m/s)
vl = 9 # leftward velocity, i.e. parallel to earth-surface (m/s)
af = 14 # forward acceleration (m/s^2)
for dir_ in tool.get_all(origin_data_dir):
sync_data_dir = compare_path(dir_, 'oxts', 'data')
print(sync_data_dir)
txt_list = tool.get_all(sync_data_dir)
outlines = list()
for txt in txt_list:
lines = tool.read_text(txt)
line_items = lines[0].split()
# print(float(line_items[vf]) * 3.6)
v_origin = float(line_items[vf]) * 3.6
v_level = get_lv(v_origin)
if v_level is None:
raise Exception
item = '{} {}'.format(v_origin, v_level)
outlines.append(item)
tool.write_text(compare_path(dir_, tool.sync_name), outlines)
def to_name(i):
i = str(i)
return '{}{}{}'.format(''.join(['0' for i in range(0, 10 - len(i))]), i, '.png')
def copy_to_process_set():
for i, set_dir in enumerate(tool.get_all(origin_data_dir)):
lines = tool.read_text(compare_path(set_dir, 'sync.txt'))
set_id = re.match('.*2011_09_26_drive_(?P<set_id>\d*)_sync.*', set_dir).groupdict()["set_id"]
for image_index, line in enumerate(lines):
v, level = line.split()
target_path = compare_path(processed_set_dir, level)
if not os.path.exists(target_path):
os.makedirs(target_path)
origin_filename = compare_path(set_dir, 'image_02', 'data', to_name(image_index))
target_filename = compare_path(target_path, "set_{}_lv{}_{}".format(set_id, level, to_name(image_index)))
print("From {}\n\tTo: {}".format(origin_filename, target_filename))
data = tool.read_image(origin_filename)
if data is None:
print('[WAIN] From image_03', set_dir, image_index)
origin_filename = compare_path(set_dir, 'image_03', 'data', to_name(image_index))
data = tool.read_image(origin_filename)
if data is None:
print("[ERROR] No exists in ", set_dir, image_index)
else:
data = tool.ArrayCut(data, cut_shape[:2], mode=8)
data = tool.image_cut(data, (image_width, image_height))
tool.image_save(target_filename, data)
def split_validation_by_copy():
import random
from_dir = processed_set_dir
for i, cate_dirname in enumerate(os.listdir(from_dir)):
if cate_dirname.startswith('.'):
continue
cate_dir = compare_path(from_dir, cate_dirname)
cate_listdir = list(filter(lambda x: not x.startswith('.'), os.listdir(cate_dir)))
n = int(len(cate_listdir) * validation_rate)
validation_files = random.sample(cate_listdir, n)
validation_cate_path = compare_path(validation_set_dir, cate_dirname)
print(validation_cate_path)
if not os.path.exists(validation_cate_path):
os.makedirs(validation_cate_path)
for validation_file in validation_files:
shutil.copy(compare_path(cate_dir, validation_file),
compare_path(validation_cate_path, validation_file))
train_set_path = compare_path(trainset_dir, cate_dirname)
if not os.path.exists(train_set_path):
os.makedirs(train_set_path)
train_set_files = list(set(cate_listdir).difference(set(validation_files)))
for train_set_file in train_set_files:
shutil.copy(compare_path(cate_dir, train_set_file),
compare_path(train_set_path, train_set_file))
def _test():
# print(get_set('0001').shape)
# print(get_flag('0001').shape)
# print(tool.dir_util.origin_sync_dirname)
# generate_sync_txt()
# copy_to_process_set()
split_validation_by_copy()
if __name__ == '__main__':
_test()
| 31.460526 | 117 | 0.641363 |
import os
import re
import shutil
import tool
import config
__author__ = 'Yunhao Cao'
__all__ = [
'',
]
level_list = config.LV_LIST
classes = config.NUM_OF_LEVEL
validation_rate = config.VALIDATION_RATE
origin_data_dir = config.ORIGIN_DATA_DIR
processed_set_dir = config.PROCESSED_SET_DIR
trainset_dir = config.TRAINSET_DIR
validation_set_dir = config.VALIDATION_DIR
cut_shape = config.CUT_SHAPE_0
train_shape = config.TRAIN_SHAPE
image_width = config.IMAGE_WIDTH
image_height = config.IMAGE_HEIGHT
compare_path = tool.compare_path
def get_lv(v) -> int:
for i, lv in enumerate(level_list):
if abs(v) < lv:
return i
def generate_sync_txt():
vf = 8
vl = 9
af = 14
for dir_ in tool.get_all(origin_data_dir):
sync_data_dir = compare_path(dir_, 'oxts', 'data')
print(sync_data_dir)
txt_list = tool.get_all(sync_data_dir)
outlines = list()
for txt in txt_list:
lines = tool.read_text(txt)
line_items = lines[0].split()
v_origin = float(line_items[vf]) * 3.6
v_level = get_lv(v_origin)
if v_level is None:
raise Exception
item = '{} {}'.format(v_origin, v_level)
outlines.append(item)
tool.write_text(compare_path(dir_, tool.sync_name), outlines)
def to_name(i):
i = str(i)
return '{}{}{}'.format(''.join(['0' for i in range(0, 10 - len(i))]), i, '.png')
def copy_to_process_set():
for i, set_dir in enumerate(tool.get_all(origin_data_dir)):
lines = tool.read_text(compare_path(set_dir, 'sync.txt'))
set_id = re.match('.*2011_09_26_drive_(?P<set_id>\d*)_sync.*', set_dir).groupdict()["set_id"]
for image_index, line in enumerate(lines):
v, level = line.split()
target_path = compare_path(processed_set_dir, level)
if not os.path.exists(target_path):
os.makedirs(target_path)
origin_filename = compare_path(set_dir, 'image_02', 'data', to_name(image_index))
target_filename = compare_path(target_path, "set_{}_lv{}_{}".format(set_id, level, to_name(image_index)))
print("From {}\n\tTo: {}".format(origin_filename, target_filename))
data = tool.read_image(origin_filename)
if data is None:
print('[WAIN] From image_03', set_dir, image_index)
origin_filename = compare_path(set_dir, 'image_03', 'data', to_name(image_index))
data = tool.read_image(origin_filename)
if data is None:
print("[ERROR] No exists in ", set_dir, image_index)
else:
data = tool.ArrayCut(data, cut_shape[:2], mode=8)
data = tool.image_cut(data, (image_width, image_height))
tool.image_save(target_filename, data)
def split_validation_by_copy():
import random
from_dir = processed_set_dir
for i, cate_dirname in enumerate(os.listdir(from_dir)):
if cate_dirname.startswith('.'):
continue
cate_dir = compare_path(from_dir, cate_dirname)
cate_listdir = list(filter(lambda x: not x.startswith('.'), os.listdir(cate_dir)))
n = int(len(cate_listdir) * validation_rate)
validation_files = random.sample(cate_listdir, n)
validation_cate_path = compare_path(validation_set_dir, cate_dirname)
print(validation_cate_path)
if not os.path.exists(validation_cate_path):
os.makedirs(validation_cate_path)
for validation_file in validation_files:
shutil.copy(compare_path(cate_dir, validation_file),
compare_path(validation_cate_path, validation_file))
train_set_path = compare_path(trainset_dir, cate_dirname)
if not os.path.exists(train_set_path):
os.makedirs(train_set_path)
train_set_files = list(set(cate_listdir).difference(set(validation_files)))
for train_set_file in train_set_files:
shutil.copy(compare_path(cate_dir, train_set_file),
compare_path(train_set_path, train_set_file))
def _test():
split_validation_by_copy()
if __name__ == '__main__':
_test()
| true | true |
f72fe2b962d8ae02afda6b1e6bd5174272456fd7 | 1,176 | py | Python | src/okchain1/theme/rtd/conf/clients_ruby.py | sakya666/crate-docs-theme | 5767fe05c342581d1387baa7222ec09f61ce9cc5 | [
"Apache-2.0"
] | null | null | null | src/okchain1/theme/rtd/conf/clients_ruby.py | sakya666/crate-docs-theme | 5767fe05c342581d1387baa7222ec09f61ce9cc5 | [
"Apache-2.0"
] | null | null | null | src/okchain1/theme/rtd/conf/clients_ruby.py | sakya666/crate-docs-theme | 5767fe05c342581d1387baa7222ec09f61ce9cc5 | [
"Apache-2.0"
] | 1 | 2022-03-14T04:06:36.000Z | 2022-03-14T04:06:36.000Z | # -*- coding: utf-8; -*-
#
# Licensed to Crate (https://crate.io) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
# Inherit the shared Read the Docs Sphinx configuration, then override the
# project-specific settings below.
from okchain1.theme.rtd.conf import *

# Project title displayed in the rendered documentation.
project = u'Crate Ruby Driver'

# html_theme_options is defined by the base configuration imported above;
# only the canonical URL path differs per project.
html_theme_options.update({
    'canonical_url_path': 'docs/clients/ruby/en/latest/',
})
| 40.551724 | 77 | 0.764456 |
from okchain1.theme.rtd.conf import *
project = u'Crate Ruby Driver'
html_theme_options.update({
'canonical_url_path': 'docs/clients/ruby/en/latest/',
})
| true | true |
f72fe2bfca70709b096167614b03f46712fae7e4 | 5,248 | py | Python | hwtLib/tests/types/union_test.py | optical-o/hwtLib | edad621f5ad4cdbea20a5751ff4468979afe2f77 | [
"MIT"
] | 24 | 2017-02-23T10:00:50.000Z | 2022-01-28T12:20:21.000Z | hwtLib/tests/types/union_test.py | optical-o/hwtLib | edad621f5ad4cdbea20a5751ff4468979afe2f77 | [
"MIT"
] | 32 | 2017-04-28T10:29:34.000Z | 2021-04-27T09:16:43.000Z | hwtLib/tests/types/union_test.py | optical-o/hwtLib | edad621f5ad4cdbea20a5751ff4468979afe2f77 | [
"MIT"
] | 8 | 2019-09-19T03:34:36.000Z | 2022-01-21T06:56:58.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.hdl.types.union import HUnion
from hwtLib.types.ctypes import uint8_t, uint16_t, int8_t, uint32_t
from pyMathBitPrecise.bit_utils import mask
class UnionTC(unittest.TestCase):
    """Tests for the HUnion HDL type: construction constraints and the
    aliasing behavior of union values (writes through one member are
    visible through the others)."""

    def test_assertMembersSameSize(self):
        # All members of a union must have the same bit width.
        t = HUnion(
            (uint8_t, "a"),
            (uint8_t, "b"),
            (uint8_t, "c"),
            (uint8_t, "d"),
        )
        self.assertEqual(t.bit_length(), 8)

        with self.assertRaises(TypeError):
            HUnion(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )

    def test_assertNoPadding(self):
        # Unlike HStruct, HUnion does not accept anonymous (padding) members.
        with self.assertRaises(AssertionError):
            HUnion(
                (uint8_t, None),
                (uint8_t, "b"),
            )

    def test_value_simple(self):
        # Writing one member must be reflected in the other: same bits,
        # different interpretation (unsigned 0xFF == signed -1).
        t = HUnion(
            (uint8_t, "unsigned"),
            (int8_t, "signed"),
        )

        v = t.from_py(None)
        v.unsigned = mask(8)
        self.assertEqual(int(v.signed), -1)
        v.signed = 0
        self.assertEqual(int(v.unsigned), 0)

    def test_value_struct_and_bits(self):
        # A struct member and a plain-bits member alias the same storage;
        # partially defined struct fields yield a partial vld_mask on the
        # bits view.
        t = HUnion(
            (uint16_t, "bits"),
            (HStruct(
                (uint8_t, "lower"),
                (uint8_t, "upper"),
                ), "struct"),
        )
        v = t.from_py(None)

        v.struct.upper = 1
        self.assertEqual(v.bits.val, 1 << 8)
        self.assertEqual(v.bits.vld_mask, mask(8) << 8)

        v.struct.lower = 1
        self.assertEqual(v.bits.val, (1 << 8) | 1)
        self.assertEqual(v.bits.vld_mask, mask(16))

        v.bits = 2
        self.assertEqual(int(v.struct.lower), 2)
        self.assertEqual(int(v.struct.upper), 0)

    def test_value_array_and_bits(self):
        # Array elements map little-endian onto the bits member.
        t = HUnion(
            (uint32_t, "bits"),
            (uint8_t[4], "arr"),
        )
        v = t.from_py(None)
        b = (4 << (3 * 8)) | (3 << (2 * 8)) | (2 << 8) | 1
        v.bits = b
        for i, item in enumerate(v.arr):
            self.assertEqual(int(item), i + 1)

        self.assertEqual(int(v.bits), b)

    def test_value_array_toArray(self):
        # Each 16b word holding i + 1 is seen through the 8b array as a
        # low byte with the value followed by a zero high byte.
        t = HUnion(
            (uint16_t[2], "arr16b"),
            (int8_t[4], "arr8b"),
        )
        v = t.from_py(None)
        for i in range(len(v.arr16b)):
            v.arr16b[i] = i + 1

        for i, item in enumerate(v.arr8b):
            # NOTE: previously this loop reused the name `v` for the
            # expected value, shadowing the union value object above;
            # renamed to `expected` for clarity (behavior unchanged).
            if (i + 1) % 2 == 0:
                expected = 0
            else:
                expected = i // 2 + 1
            self.assertEqual(int(item), expected)

    def test_value_array_of_struct_to_bits(self):
        # Structs packed back-to-back in an array concatenate onto the
        # bits member (24 bits per element: 16b "a" then 8b "b").
        t = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr"),
            (Bits(24 * 3), "bits")
        )
        v = t.from_py(None)
        for i in range(len(v.arr)):
            v.arr[i] = {"a": i + 1,
                        "b": (i + 1) * 3
                        }

        self.assertEqual(int(v.bits),
                         1
                         | 3 << 16
                         | 2 << 24
                         | 6 << (24 + 16)
                         | 3 << (2 * 24)
                         | 9 << (2 * 24 + 16))

    def test_hunion_type_eq(self):
        # Equality is structural: member order does not matter, but member
        # names, element types and signedness all do.
        t0 = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr"),
            (Bits(24 * 3), "bits")
        )
        t1 = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr"),
            (Bits(24 * 3), "bits")
        )
        self.assertEqual(t0, t1)
        self.assertEqual(t1, t0)

        # same members in a different order -> still equal
        t1 = HUnion(
            (Bits(24 * 3), "bits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr")
        )
        self.assertEqual(t0, t1)
        self.assertEqual(t1, t0)

        # different member types -> not equal
        t1 = HUnion(
            (uint32_t, "bits"),
            (uint8_t[4], "arr"),
        )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)

        # different member name ("bbits") -> not equal
        t1 = HUnion(
            (Bits(24 * 3), "bbits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr")
        )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)

        # comparison against a non-union type -> not equal
        t1 = Bits(24 * 3)
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)

        # explicit signedness differs from the default -> not equal
        t1 = HUnion(
            (Bits(24 * 3, signed=False), "bits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr")
        )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)
if __name__ == '__main__':
    # Build an explicit suite so a single test can be selected during
    # development (see the commented-out line below).
    suite = unittest.TestSuite()
    # suite.addTest(UnionTC('testValue'))
    suite.addTest(unittest.makeSuite(UnionTC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
| 26.639594 | 67 | 0.416921 |
import unittest
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.hdl.types.union import HUnion
from hwtLib.types.ctypes import uint8_t, uint16_t, int8_t, uint32_t
from pyMathBitPrecise.bit_utils import mask
class UnionTC(unittest.TestCase):
def test_assertMembersSameSize(self):
t = HUnion(
(uint8_t, "a"),
(uint8_t, "b"),
(uint8_t, "c"),
(uint8_t, "d"),
)
self.assertEqual(t.bit_length(), 8)
with self.assertRaises(TypeError):
HUnion(
(uint16_t, "a"),
(uint8_t, "b"),
)
def test_assertNoPadding(self):
with self.assertRaises(AssertionError):
HUnion(
(uint8_t, None),
(uint8_t, "b"),
)
def test_value_simple(self):
t = HUnion(
(uint8_t, "unsigned"),
(int8_t, "signed"),
)
v = t.from_py(None)
v.unsigned = mask(8)
self.assertEqual(int(v.signed), -1)
v.signed = 0
self.assertEqual(int(v.unsigned), 0)
def test_value_struct_and_bits(self):
t = HUnion(
(uint16_t, "bits"),
(HStruct(
(uint8_t, "lower"),
(uint8_t, "upper"),
), "struct"),
)
v = t.from_py(None)
v.struct.upper = 1
self.assertEqual(v.bits.val, 1 << 8)
self.assertEqual(v.bits.vld_mask, mask(8) << 8)
v.struct.lower = 1
self.assertEqual(v.bits.val, (1 << 8) | 1)
self.assertEqual(v.bits.vld_mask, mask(16))
v.bits = 2
self.assertEqual(int(v.struct.lower), 2)
self.assertEqual(int(v.struct.upper), 0)
def test_value_array_and_bits(self):
t = HUnion(
(uint32_t, "bits"),
(uint8_t[4], "arr"),
)
v = t.from_py(None)
b = (4 << (3 * 8)) | (3 << (2 * 8)) | (2 << 8) | 1
v.bits = b
for i, item in enumerate(v.arr):
self.assertEqual(int(item), i + 1)
self.assertEqual(int(v.bits), b)
def test_value_array_toArray(self):
t = HUnion(
(uint16_t[2], "arr16b"),
(int8_t[4], "arr8b"),
)
v = t.from_py(None)
for i in range(len(v.arr16b)):
v.arr16b[i] = i + 1
for i, item in enumerate(v.arr8b):
if (i + 1) % 2 == 0:
v = 0
else:
v = i // 2 + 1
self.assertEqual(int(item), v)
def test_value_array_of_struct_to_bits(self):
t = HUnion(
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr"),
(Bits(24 * 3), "bits")
)
v = t.from_py(None)
for i in range(len(v.arr)):
v.arr[i] = {"a": i + 1,
"b": (i + 1) * 3
}
self.assertEqual(int(v.bits),
1
| 3 << 16
| 2 << 24
| 6 << (24 + 16)
| 3 << (2 * 24)
| 9 << (2 * 24 + 16))
def test_hunion_type_eq(self):
t0 = HUnion(
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr"),
(Bits(24 * 3), "bits")
)
t1 = HUnion(
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr"),
(Bits(24 * 3), "bits")
)
self.assertEqual(t0, t1)
self.assertEqual(t1, t0)
t1 = HUnion(
(Bits(24 * 3), "bits"),
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr")
)
self.assertEqual(t0, t1)
self.assertEqual(t1, t0)
t1 = HUnion(
(uint32_t, "bits"),
(uint8_t[4], "arr"),
)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
t1 = HUnion(
(Bits(24 * 3), "bbits"),
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr")
)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
t1 = Bits(24 * 3)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
t1 = HUnion(
(Bits(24 * 3, signed=False), "bits"),
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr")
)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
if __name__ == '__main__':
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(UnionTC))
runner = unittest.TextTestRunner(verbosity=3)
runner.run(suite)
| true | true |
f72fe2eb838ca241bfbb6311a02f5d6800326a7d | 3,217 | py | Python | editor/photo_effects.py | gitgik/photo-editing-app | 730f88a1946d425cbe790cd4ed0689a1938a8cd0 | [
"MIT"
] | 5 | 2017-02-23T14:24:22.000Z | 2021-02-23T03:43:18.000Z | editor/photo_effects.py | gitgik/photo-editing-app | 730f88a1946d425cbe790cd4ed0689a1938a8cd0 | [
"MIT"
] | 1 | 2021-06-08T19:14:01.000Z | 2021-06-08T19:14:01.000Z | editor/photo_effects.py | gitgik/photo-editing-app | 730f88a1946d425cbe790cd4ed0689a1938a8cd0 | [
"MIT"
] | 2 | 2019-01-21T20:16:05.000Z | 2019-06-23T14:30:50.000Z | """Define imports."""
from PIL import ImageFilter, ImageOps, ImageEnhance
def grayscale(image, name, temp_url):
    """Save a grayscale copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "GRAYSCALE" + name
    ImageOps.grayscale(image).save(destination)
    return destination
def smooth(image, name, temp_url):
    """Save a smoothed copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "SMOOTH" + name
    image.filter(ImageFilter.SMOOTH).save(destination)
    return destination
def contour(image, name, temp_url):
    """Save a contour-filtered copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "CONTOUR" + name
    image.filter(ImageFilter.CONTOUR).save(destination)
    return destination
def sharpen(image, name, temp_url):
    """Save a sharpened copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "SHARPEN" + name
    image.filter(ImageFilter.SHARPEN).save(destination)
    return destination
def detail(image, name, temp_url):
    """Save an edge-enhanced copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "DETAIL" + name
    image.filter(ImageFilter.EDGE_ENHANCE).save(destination)
    return destination
def flip(image, name, temp_url):
    """Save a vertically flipped copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "FLIP" + name
    ImageOps.flip(image).save(destination)
    return destination
def invert(image, name, temp_url):
    """Save a color-inverted copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "INVERT" + name
    ImageOps.invert(image).save(destination)
    return destination
def mirror(image, name, temp_url):
    """Save a horizontally flipped copy of ``image`` and return its file path."""
    image.seek(0)
    destination = temp_url + "MIRROR" + name
    ImageOps.mirror(image).save(destination)
    return destination
def contrast(image, name, temp_url):
    """Save ``image`` with contrast boosted by a factor of 1.5; return the path."""
    image.seek(0)
    destination = temp_url + "CONTRAST" + name
    ImageEnhance.Contrast(image).enhance(1.5).save(destination)
    return destination
def blur(image, name, temp_url):
    """Save ``image`` blurred with a radius-3 Gaussian filter; return the path."""
    image.seek(0)
    destination = temp_url + "BLUR" + name
    image.filter(ImageFilter.GaussianBlur(radius=3)).save(destination)
    return destination
def brighten(image, name, temp_url):
    """Save ``image`` with brightness boosted by a factor of 1.5; return the path."""
    image.seek(0)
    destination = temp_url + "BRIGHTEN" + name
    ImageEnhance.Brightness(image).enhance(1.5).save(destination)
    return destination
def darken(image, name, temp_url):
    """Return an image with a brightness enhancement factor of 0.5.

    Bug fix: the result was previously saved and returned with the
    "SATURATE" prefix (a copy-paste from :func:`saturate`), so a darkened
    image silently overwrote / collided with a saturated one for the same
    ``name``. It now uses its own "DARKEN" prefix.
    """
    image.seek(0)
    photo = ImageEnhance.Brightness(image)
    photo = photo.enhance(0.5)
    photo.save(temp_url + "DARKEN" + name)
    return temp_url + "DARKEN" + name
def saturate(image, name, temp_url):
    """Save ``image`` with color saturation doubled; return the file path."""
    image.seek(0)
    destination = temp_url + "SATURATE" + name
    ImageEnhance.Color(image).enhance(2.0).save(destination)
    return destination
| 28.723214 | 74 | 0.658688 | from PIL import ImageFilter, ImageOps, ImageEnhance
def grayscale(image, name, temp_url):
image.seek(0)
photo = ImageOps.grayscale(image)
photo.save(temp_url + "GRAYSCALE" + name)
return temp_url + "GRAYSCALE" + name
def smooth(image, name, temp_url):
image.seek(0)
photo = image.filter(ImageFilter.SMOOTH)
photo.save(temp_url + "SMOOTH" + name)
return temp_url + "SMOOTH" + name
def contour(image, name, temp_url):
image.seek(0)
photo = image.filter(ImageFilter.CONTOUR)
photo.save(temp_url + "CONTOUR" + name)
return temp_url + "CONTOUR" + name
def sharpen(image, name, temp_url):
image.seek(0)
photo = image.filter(ImageFilter.SHARPEN)
photo.save(temp_url + "SHARPEN" + name)
return temp_url + "SHARPEN" + name
def detail(image, name, temp_url):
image.seek(0)
photo = image.filter(ImageFilter.EDGE_ENHANCE)
photo.save(temp_url + "DETAIL" + name)
return temp_url + "DETAIL" + name
def flip(image, name, temp_url):
image.seek(0)
photo = ImageOps.flip(image)
photo.save(temp_url + "FLIP" + name)
return temp_url + "FLIP" + name
def invert(image, name, temp_url):
image.seek(0)
photo = ImageOps.invert(image)
photo.save(temp_url + "INVERT" + name)
return temp_url + "INVERT" + name
def mirror(image, name, temp_url):
image.seek(0)
photo = ImageOps.mirror(image)
photo.save(temp_url + "MIRROR" + name)
return temp_url + "MIRROR" + name
def contrast(image, name, temp_url):
image.seek(0)
photo = ImageEnhance.Contrast(image)
photo = photo.enhance(1.5)
photo.save(temp_url + "CONTRAST" + name)
return temp_url + "CONTRAST" + name
def blur(image, name, temp_url):
image.seek(0)
photo = image.filter(
ImageFilter.GaussianBlur(radius=3))
photo.save(temp_url + "BLUR" + name)
return temp_url + "BLUR" + name
def brighten(image, name, temp_url):
image.seek(0)
photo = ImageEnhance.Brightness(image)
photo = photo.enhance(1.5)
photo.save(temp_url + "BRIGHTEN" + name)
return temp_url + "BRIGHTEN" + name
def darken(image, name, temp_url):
image.seek(0)
photo = ImageEnhance.Brightness(image)
photo = photo.enhance(0.5)
photo.save(temp_url + "SATURATE" + name)
return temp_url + "SATURATE" + name
def saturate(image, name, temp_url):
image.seek(0)
photo = ImageEnhance.Color(image)
photo = photo.enhance(2.0)
photo.save(temp_url + "SATURATE" + name)
return temp_url + "SATURATE" + name
| true | true |
f72fe3a6e22942dfdabf42624c7f630b6ceb120b | 610 | py | Python | eveonline-assistant/plans/urls.py | wengole/eveonline-assistant | 35041952509bd347c5c9458630404726d7ddd5d8 | [
"BSD-3-Clause"
] | 1 | 2016-07-01T03:15:16.000Z | 2016-07-01T03:15:16.000Z | eveonline-assistant/plans/urls.py | wengole/eveonline-assistant | 35041952509bd347c5c9458630404726d7ddd5d8 | [
"BSD-3-Clause"
] | null | null | null | eveonline-assistant/plans/urls.py | wengole/eveonline-assistant | 35041952509bd347c5c9458630404726d7ddd5d8 | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns(
'',
# URL pattern for the UserListView
url(
regex=r'^add/$',
view=views.AddPlan.as_view(),
name='add'
),
url(
regex=r'^manage/$',
view=views.ManagePlans.as_view(),
name='manage'
),
url(
regex=r'^manage/(?P<plan_id>\d+)/$',
view=views.PlanDetail.as_view(pk_url_kwarg='plan_id'),
name='detail'
),
url(
regex=r'^addToPlan/$',
view=views.AddSkillToPlan.as_view(),
name='add_to_plan'
),
)
| 20.333333 | 62 | 0.545902 | from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns(
'',
url(
regex=r'^add/$',
view=views.AddPlan.as_view(),
name='add'
),
url(
regex=r'^manage/$',
view=views.ManagePlans.as_view(),
name='manage'
),
url(
regex=r'^manage/(?P<plan_id>\d+)/$',
view=views.PlanDetail.as_view(pk_url_kwarg='plan_id'),
name='detail'
),
url(
regex=r'^addToPlan/$',
view=views.AddSkillToPlan.as_view(),
name='add_to_plan'
),
)
| true | true |
f72fe3eae0d57d1739f0d017bc8c4f227f8e08ed | 11,579 | py | Python | asdf/util.py | eteq/asdf | 6d9e0e48bbffea166a19b71e29f5f9c211983bfe | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | asdf/util.py | eteq/asdf | 6d9e0e48bbffea166a19b71e29f5f9c211983bfe | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | asdf/util.py | eteq/asdf | 6d9e0e48bbffea166a19b71e29f5f9c211983bfe | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import inspect
import math
import struct
import types
from urllib.parse import urljoin
from urllib.request import pathname2url
from urllib import parse as urlparse
import numpy as np
from .extern.decorators import add_common_docstring
__all__ = ['human_list', 'get_array_base', 'get_base_uri', 'filepath_to_url',
'iter_subclasses', 'calculate_padding', 'resolve_name']
def human_list(l, separator="and"):
    """
    Render a sequence of strings as a human-readable enumeration.

    Parameters
    ----------
    l : sequence
        A sequence of strings

    separator : string, optional
        The word placed between the last two entries. Default: ``"and"``.

    Returns
    -------
    formatted_list : string

    Examples
    --------
    >>> human_list(["vanilla", "strawberry", "chocolate"], "or")
    'vanilla, strawberry or chocolate'
    """
    if len(l) > 1:
        head = ', '.join(l[:-1])
        return head + ' ' + separator + ' ' + l[-1]
    return l[0]
def get_array_base(arr):
    """
    Return the ultimate owner of the data underlying a Numpy array,
    following the chain of ``base`` references until it leaves ndarray
    territory.
    """
    owner = arr
    parent = owner.base
    while isinstance(parent, np.ndarray):
        owner = parent
        parent = owner.base
    return owner
def get_base_uri(uri):
    """
    Strip any fragment from *uri* and return the remainder.
    """
    scheme, netloc, path, qparams, query, _fragment = urlparse.urlparse(uri)
    return urlparse.urlunparse((scheme, netloc, path, qparams, query, ''))
def filepath_to_url(path):
    """
    Convert a local filesystem path to a ``file:`` URL.
    """
    url_path = pathname2url(path)
    return urljoin('file:', url_path)
def iter_subclasses(cls):
    """
    Recursively yield every direct and indirect subclass of *cls*,
    depth-first in definition order.
    """
    for sub in cls.__subclasses__():
        yield sub
        yield from iter_subclasses(sub)
def calculate_padding(content_size, pad_blocks, block_size):
    """
    Calculate how many padding bytes to append to a block so that the
    padded total is aligned to the filesystem block size.

    Parameters
    ----------
    content_size : int
        The size of the actual content

    pad_blocks : float or bool
        If `False`, add no padding (always return 0). If `True`, use a
        default growth factor of 1.1 (10% extra). If a float, multiply
        content_size by it to get the desired total size.

    block_size : int
        The filesystem block size to use.

    Returns
    -------
    nbytes : int
        The number of extra bytes to add for padding.
    """
    if not pad_blocks:
        return 0
    factor = 1.1 if pad_blocks is True else pad_blocks
    desired = content_size * factor
    # round up to a whole number of blocks, plus one extra block
    whole_blocks = math.ceil(float(desired) / block_size) + 1
    total = int(whole_blocks * block_size)
    return max(total - content_size, 0)
class BinaryStruct(object):
    """
    Dictionary-style wrapper around the stdlib `struct` module.

    Fields are declared once as ``(name, format)`` pairs; values can then
    be packed, unpacked and updated by name instead of by position.
    """
    def __init__(self, descr, endian='>'):
        """
        Parameters
        ----------
        descr : list of tuple
            Each entry is a pair ``(name, format)``, where ``format``
            is one of the format types understood by `struct`.

        endian : str, optional
            The endianness of the struct. Must be ``>`` or ``<``.
        """
        fmt_parts = [endian]
        self._offsets = {}
        self._names = []
        offset = 0
        for field_name, fmt in descr:
            fmt_parts.append(fmt)
            # remember each field's byte offset and its standalone format,
            # so single fields can be re-packed in update()
            self._offsets[field_name] = (offset, (endian + fmt).encode('ascii'))
            self._names.append(field_name)
            offset += struct.calcsize(fmt.encode('ascii'))
        self._fmt = ''.join(fmt_parts).encode('ascii')
        self._size = struct.calcsize(self._fmt)

    @property
    def size(self):
        """
        Return the size of the struct.
        """
        return self._size

    def pack(self, **kwargs):
        """
        Pack the given arguments, which are given as kwargs, and
        return the binary struct. Unspecified fields default to 0.
        """
        values = [0] * len(self._names)
        for field_name, value in kwargs.items():
            if field_name not in self._offsets:
                raise KeyError("No header field '{0}'".format(field_name))
            values[self._names.index(field_name)] = value
        return struct.pack(self._fmt, *values)

    def unpack(self, buff):
        """
        Unpack the given binary buffer into the fields. The result
        is a dictionary mapping field names to values.
        """
        unpacked = struct.unpack_from(self._fmt, buff[:self._size])
        return dict(zip(self._names, unpacked))

    def update(self, fd, **kwargs):
        """
        Update part of the struct in-place.

        Parameters
        ----------
        fd : generic_io.GenericIO instance
            A writable, seekable file descriptor, currently seeked
            to the beginning of the struct.

        **kwargs : values
            The values to update on the struct.
        """
        updates = []
        for field_name, value in kwargs.items():
            if field_name not in self._offsets:
                raise KeyError("No header field '{0}'".format(field_name))
            updates.append((self._offsets[field_name], value))
        # write in ascending offset order to keep seeks forward-only
        updates.sort()

        start = fd.tell()
        for (offset, datatype), value in updates:
            fd.seek(start + offset)
            fd.write(struct.pack(datatype, value))
class HashableDict(dict):
    """
    A dict subclass that is hashable (hash of the frozen set of its items).

    Intended for small dictionaries: the hash is recomputed in O(n) on
    every call, and mutating the dict changes its hash.
    """
    def __hash__(self):
        snapshot = frozenset(self.items())
        return hash(snapshot)
def resolve_name(name):
    """Resolve a name like ``module.object`` to an object and return it.

    This ends up working like ``from module import object`` but is easier
    to deal with than the `__import__` builtin and supports digging into
    submodules.

    Parameters
    ----------
    name : `str`
        A dotted path to a Python object--that is, the name of a function,
        class, or other object in a module with the full path to that module,
        including parent modules, separated by dots. Also known as the fully
        qualified name of the object.

    Examples
    --------
    >>> resolve_name('asdf.util.resolve_name')
    <function resolve_name at 0x...>

    Raises
    ------
    `ImportError`
        If the module or named object is not found.
    """
    # Note: On python 2 these must be str objects and not unicode
    parts = [str(part) for part in name.split('.')]
    if len(parts) == 1:
        # No dots in the name--just a straight up module import
        cursor = 1
        attr_name = str('') # Must not be unicode on Python 2
    else:
        cursor = len(parts) - 1
        attr_name = parts[-1]
    module_name = parts[:cursor]
    # Try to import the longest dotted prefix as a module; each time the
    # import fails, treat one more trailing component as an attribute and
    # retry with a shorter module path.
    while cursor > 0:
        try:
            ret = __import__(str('.'.join(module_name)), fromlist=[attr_name])
            break
        except ImportError:
            if cursor == 0:
                raise
            cursor -= 1
            module_name = parts[:cursor]
            attr_name = parts[cursor]
            # placeholder so the getattr loop below raises AttributeError
            # (converted to ImportError) if nothing could be imported
            ret = ''
    # Resolve the remaining components as attribute lookups on the
    # imported module (digs into submodules/classes, e.g. 'pkg.mod.Class').
    for part in parts[cursor:]:
        try:
            ret = getattr(ret, part)
        except AttributeError:
            raise ImportError(name)
    return ret
def get_class_name(obj, instance=True):
    """
    Given a class or instance of a class, returns a string representing the
    fully specified path of the class.

    Parameters
    ----------
    obj : object
        An instance of any object
    instance: bool
        Indicates whether given object is an instance of the class to be named
    """
    if instance:
        cls = type(obj)
    else:
        cls = obj
    return '.'.join([cls.__module__, cls.__name__])
def minversion(module, version, inclusive=True, version_path='__version__'):
    """
    Returns `True` if the specified Python module satisfies a minimum version
    requirement, and `False` if not.

    By default this uses `pkg_resources.parse_version` to do the version
    comparison if available. Otherwise it falls back on
    `distutils.version.LooseVersion`, and -- since ``distutils`` was removed
    from the standard library in Python 3.12 -- finally on a simple
    comparison of the leading dotted-numeric components of the two version
    strings.

    Parameters
    ----------
    module : module or `str`
        An imported module of which to check the version, or the name of
        that module (in which case an import of that module is attempted--
        if this fails `False` is returned).

    version : `str`
        The version as a string that this module must have at a minimum (e.g.
        ``'0.12'``).

    inclusive : `bool`
        The specified version meets the requirement inclusively (i.e. ``>=``)
        as opposed to strictly greater than (default: `True`).

    version_path : `str`
        A dotted attribute path to follow in the module for the version.
        Defaults to just ``'__version__'``, which should work for most Python
        modules.
    """
    if isinstance(module, types.ModuleType):
        module_name = module.__name__
    elif isinstance(module, str):
        module_name = module
        try:
            module = resolve_name(module_name)
        except ImportError:
            return False
    else:
        raise ValueError('module argument must be an actual imported '
                         'module, or the import name of the module; '
                         'got {0!r}'.format(module))

    if '.' not in version_path:
        have_version = getattr(module, version_path)
    else:
        have_version = resolve_name('.'.join([module.__name__, version_path]))

    try:
        from pkg_resources import parse_version
    except ImportError:
        try:
            from distutils.version import LooseVersion as parse_version
        except ImportError:
            # distutils was removed in Python 3.12; fall back to comparing
            # the leading dotted-numeric portions of the version strings.
            def parse_version(v):
                release = []
                for part in str(v).split('.'):
                    if part.isdigit():
                        release.append(int(part))
                    else:
                        break
                return tuple(release)

    if inclusive:
        return parse_version(have_version) >= parse_version(version)
    else:
        return parse_version(have_version) > parse_version(version)
class InheritDocstrings(type):
    """
    Metaclass that fills in missing method docstrings from base classes.

    When a class overrides a public method without giving it a docstring,
    the docstring of the first matching method found along the MRO is
    copied onto the override -- the same order in which Python would
    resolve the method itself. If that selects the wrong docstring, write
    one explicitly on the method.

    For example::

        >>> class A(metaclass=InheritDocstrings):
        ...     def wiggle(self):
        ...         "Wiggle the thingamajig"
        ...         pass
        >>> class B(A):
        ...     def wiggle(self):
        ...         pass
        >>> B.wiggle.__doc__
        'Wiggle the thingamajig'
    """
    def __init__(cls, name, bases, dct):
        def is_public_member(key):
            # dunder names (longer than '____') count as public;
            # everything else is public unless it has a leading underscore
            if key.startswith('__') and key.endswith('__'):
                return len(key) > 4
            return not key.startswith('_')

        for attr_name, attr in dct.items():
            if not inspect.isfunction(attr):
                continue
            if not is_public_member(attr_name) or attr.__doc__ is not None:
                continue
            for ancestor in cls.__mro__[1:]:
                inherited = getattr(ancestor, attr_name, None)
                if inherited is not None:
                    attr.__doc__ = inherited.__doc__
                    break

        super(InheritDocstrings, cls).__init__(name, bases, dct)
| 28.9475 | 78 | 0.59297 |
import inspect
import math
import struct
import types
from urllib.parse import urljoin
from urllib.request import pathname2url
from urllib import parse as urlparse
import numpy as np
from .extern.decorators import add_common_docstring
__all__ = ['human_list', 'get_array_base', 'get_base_uri', 'filepath_to_url',
'iter_subclasses', 'calculate_padding', 'resolve_name']
def human_list(l, separator="and"):
if len(l) == 1:
return l[0]
else:
return ', '.join(l[:-1]) + ' ' + separator + ' ' + l[-1]
def get_array_base(arr):
base = arr
while isinstance(base.base, np.ndarray):
base = base.base
return base
def get_base_uri(uri):
parts = urlparse.urlparse(uri)
return urlparse.urlunparse(list(parts[:5]) + [''])
def filepath_to_url(path):
return urljoin('file:', pathname2url(path))
def iter_subclasses(cls):
for x in cls.__subclasses__():
yield x
for y in iter_subclasses(x):
yield y
def calculate_padding(content_size, pad_blocks, block_size):
if not pad_blocks:
return 0
if pad_blocks is True:
pad_blocks = 1.1
new_size = content_size * pad_blocks
new_size = int((math.ceil(
float(new_size) / block_size) + 1) * block_size)
return max(new_size - content_size, 0)
class BinaryStruct(object):
def __init__(self, descr, endian='>'):
self._fmt = [endian]
self._offsets = {}
self._names = []
i = 0
for name, fmt in descr:
self._fmt.append(fmt)
self._offsets[name] = (i, (endian + fmt).encode('ascii'))
self._names.append(name)
i += struct.calcsize(fmt.encode('ascii'))
self._fmt = ''.join(self._fmt).encode('ascii')
self._size = struct.calcsize(self._fmt)
@property
def size(self):
return self._size
def pack(self, **kwargs):
fields = [0] * len(self._names)
for key, val in kwargs.items():
if key not in self._offsets:
raise KeyError("No header field '{0}'".format(key))
i = self._names.index(key)
fields[i] = val
return struct.pack(self._fmt, *fields)
def unpack(self, buff):
args = struct.unpack_from(self._fmt, buff[:self._size])
return dict(zip(self._names, args))
def update(self, fd, **kwargs):
updates = []
for key, val in kwargs.items():
if key not in self._offsets:
raise KeyError("No header field '{0}'".format(key))
updates.append((self._offsets[key], val))
updates.sort()
start = fd.tell()
for ((offset, datatype), val) in updates:
fd.seek(start + offset)
fd.write(struct.pack(datatype, val))
class HashableDict(dict):
def __hash__(self):
return hash(frozenset(self.items()))
def resolve_name(name):
parts = [str(part) for part in name.split('.')]
if len(parts) == 1:
cursor = 1
attr_name = str('')
else:
cursor = len(parts) - 1
attr_name = parts[-1]
module_name = parts[:cursor]
while cursor > 0:
try:
ret = __import__(str('.'.join(module_name)), fromlist=[attr_name])
break
except ImportError:
if cursor == 0:
raise
cursor -= 1
module_name = parts[:cursor]
attr_name = parts[cursor]
ret = ''
for part in parts[cursor:]:
try:
ret = getattr(ret, part)
except AttributeError:
raise ImportError(name)
return ret
def get_class_name(obj, instance=True):
typ = type(obj) if instance else obj
return "{}.{}".format(typ.__module__, typ.__name__)
def minversion(module, version, inclusive=True, version_path='__version__'):
if isinstance(module, types.ModuleType):
module_name = module.__name__
elif isinstance(module, str):
module_name = module
try:
module = resolve_name(module_name)
except ImportError:
return False
else:
raise ValueError('module argument must be an actual imported '
'module, or the import name of the module; '
'got {0!r}'.format(module))
if '.' not in version_path:
have_version = getattr(module, version_path)
else:
have_version = resolve_name('.'.join([module.__name__, version_path]))
try:
from pkg_resources import parse_version
except ImportError:
from distutils.version import LooseVersion as parse_version
if inclusive:
return parse_version(have_version) >= parse_version(version)
else:
return parse_version(have_version) > parse_version(version)
class InheritDocstrings(type):
def __init__(cls, name, bases, dct):
def is_public_member(key):
return (
(key.startswith('__') and key.endswith('__')
and len(key) > 4) or
not key.startswith('_'))
for key, val in dct.items():
if (inspect.isfunction(val) and
is_public_member(key) and
val.__doc__ is None):
for base in cls.__mro__[1:]:
super_method = getattr(base, key, None)
if super_method is not None:
val.__doc__ = super_method.__doc__
break
super(InheritDocstrings, cls).__init__(name, bases, dct)
| true | true |
f72fe510b547f529b3a5626defad1371dfcbc75e | 16,658 | py | Python | wbgapi/data.py | mo-cmyk/wbgapi | a0f8658b7a74ec79256d7b66ff58cb95726e89aa | [
"MIT"
] | 41 | 2020-01-29T17:39:50.000Z | 2022-03-31T00:21:52.000Z | wbgapi/data.py | mo-cmyk/wbgapi | a0f8658b7a74ec79256d7b66ff58cb95726e89aa | [
"MIT"
] | 18 | 2020-01-03T06:43:43.000Z | 2022-02-19T13:09:21.000Z | wbgapi/data.py | mo-cmyk/wbgapi | a0f8658b7a74ec79256d7b66ff58cb95726e89aa | [
"MIT"
] | 7 | 2021-03-24T15:41:09.000Z | 2022-03-21T21:26:25.000Z |
'''Access World Bank API data
'''
import wbgapi as w
try:
import numpy as np
import pandas as pd
except ImportError:
np = None
pd = None
def fetch(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, params={}, db=None, **dimensions):
    '''Retrieve rows of data for the current database

    Arguments:
        series: a series identifier or list-like, e.g., SP.POP.TOTL

        economy: an economy identifier or list-like, e.g., 'BRA' or ['USA', 'CAN', 'MEX']

        time: a time identifier or list-like, e.g., 'YR2015' or range(2010,2020).
            Both element keys and values are acceptable

        mrv: return only the specified number of most recent values (same time period for all economies)

        mrnev: return only the specified number of non-empty most recent values (time period varies)

        skipBlanks: skip empty observations

        labels: include both dimension id and name (e.g., ZWE & Zimbabwe, not just ZWE)

        skipAggs: skip aggregates

        numericTimeKeys: store the time object by value (e.g., 2014) instead of key ('YR2014') if value is numeric

        params: extra query parameters to pass to the API

        dimensions: extra dimensions, database specific (e.g., version)

    Returns:
        A generator object

    Examples:
        # print name and population of all economies for all available years
        for elem in wbgapi.data.fetch('SP.POP.TOTL',labels=True):
            print(elem['economy']['value'], elem['time']['value'], elem['value'])

        # fetch data for Brazil for odd-numbered years
        for elem in wbgapi.data.fetch('NY.GDP.PCAP.CD', 'BRA', range(2011,2020,2)):
            print(elem['value'])

        # most recent poverty rates for all LAC countries
        for elem in wbgapi.data.fetch('SI.POV.NAHC', economy=wb.region.members('LAC'), mrnev=1):
            print(elem['economy'], elem['time'], elem['value'])

        # dict of most recent population data for economies over 100000
        popData = {i['economy']: i['value'] for i in wbgapi.data.fetch('SP.POP.TOTL', mrnev=1, skipAggs=True) if i['value'] > 100000}
    '''

    if db is None:
        db = w.db

    # concept name -> metadata, plus a reverse lookup from the API's key
    concepts = w.source.concepts(db)
    concept_keys = {v['key']: k for k,v in concepts.items()}
    params_ = {}
    params_.update(params)
    if mrv:
        params_['mrv'] = mrv
    elif mrnev:
        params_['mrnev'] = mrnev

    # you can thus pass series, economy, and time in the dimensions array, and those will overwrite the explicit parameters
    dimensions_ = {'series': series, 'economy': economy, 'time': time}
    dimensions_.update(dimensions)

    # build the request URL one '/{concept-key}/{value}' segment at a time
    url = 'sources/{}'.format(db)
    keys = ['series', 'economy', 'time']
    values = {}
    for k,v in dimensions_.items():
        if k not in concepts:
            raise KeyError('{} is not a concept in database {}'.format(k, db))

        if k not in keys:
            keys.append(k)

        url += '/{}/{}'.format(concepts[k]['key'], '{' + k + '}')
        values[k] = w.queryParam(v, concept=k, db=db)

    # set of aggregate economy ids, used for skipAggs and the 'aggregate' flag
    aggs = w.economy.aggregates()

    for row in w.refetch(url, keys, params=params_, **values):
        if skipBlanks and row['value'] is None:
            continue

        skip = False

        x = {'value': row['value']}
        # translate each dimension element of the API row into the output dict
        for elem in row['variable']:
            key = concept_keys[elem['concept'].lower()]
            if key == 'economy' and skipAggs and elem['id'] in aggs:
                skip = True
                break

            if not skip:
                if labels:
                    # keep the full {id, value} object for the dimension
                    del(elem['concept'])
                    x[key] = elem
                    if key == 'economy':
                        x[key]['aggregate'] = elem['id'] in aggs
                    elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
                        x[key]['id'] = int(elem['value'])
                else:
                    # keep only the dimension id
                    x[key] = elem['id']
                    if key == 'economy':
                        x['aggregate'] = elem['id'] in aggs
                    elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
                        x[key] = int(elem['value'])

        if not skip:
            yield x
def FlatFrame(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, params={}, db=None, **dimensions):
    '''Retrieve a flat pandas dataframe (1 row per observation)

    Arguments:
        series:             a series identifier or list-like, e.g., SP.POP.TOTL

        economy:            an economy identifier or list-like, e.g., 'BRA' or ['USA', 'CAN', 'MEX']

        time:               a time identifier or list-like, e.g., 'YR2015' or range(2010,2020).
                            Both element keys and values are acceptable

        mrv:                return only the specified number of most recent values (same time period for all economies)

        mrnev:              return only the specified number of non-empty most recent values (time period varies)

        skipBlanks:         skip empty observations

        labels:             return the dimension name instead of the identifier

        skipAggs:           skip aggregates

        params:             extra query parameters to pass to the API

        dimensions:         extra dimensions, database specific (e.g., version)

    Returns:
        a pandas DataFrame, or None if the request returned no observations

    Notes:
        values in the time column are numeric if possible (2015 not 'YR2015')
    '''
    if pd is None:
        raise ModuleNotFoundError('you must install pandas to use this feature')

    key = 'value' if labels else 'id'
    columns = None
    rows = []

    # we set numericTimeKeys=True so that time values will always be numeric if possible
    for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, numericTimeKeys=True, skipAggs=skipAggs, params=params, db=db, **dimensions):
        if columns is None:
            # this assumes that the API returns the same object structure in every row, so we can use the first as a template
            columns = list(row.keys())

        rows.append([row[i][key] if type(row[i]) is dict else row[i] for i in columns])

    if columns is None:
        # no observations: preserve the historical return value of None
        return None

    # build the frame in a single constructor call: the previous implementation
    # appended one row at a time via df.loc[len(df)], which is quadratic in the
    # number of observations (pandas copies the frame on each enlargement)
    return pd.DataFrame(rows, columns=columns)
def DataFrame(series, economy='all', time='all', index=None, columns=None, mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, timeColumns=False, params={}, db=None, **dimensions):
    '''Retrieve a 2-dimensional pandas dataframe.

    Arguments:
        series:             a series identifier or list-like, e.g., SP.POP.TOTL

        economy:            an economy identifier or list-like, e.g., 'BRA' or ['USA', 'CAN', 'MEX']

        time:               a time identifier or list-like, e.g., 'YR2015' or range(2010,2020).
                            Both element keys and values are acceptable

        index:              name or list of dimensions for the DataFrame's index, e.g., 'economy'. If None then the function
                            will define the index based on your request. Note: to get a dataframe with no index
                            (i.e., 0-based integers) call `reset_index()` with on the return value of this function.

        columns:            name of the dimension for the DataFrame's columns, e.g., 'series'. If None then the function
                            will define columns based on your request.

        mrv:                return only the specified number of most recent values (same time period for all economies)

        mrnev:              return only the specified number of non-empty most recent values (time period varies)

        skipBlanks:         skip empty observations

        labels:             include the dimension name for rows

        skipAggs:           skip aggregates

        numericTimeKeys:    store the time object by value (e.g., 2014) instead of key ('YR2014') if value is numeric

        timeColumns:        add extra columns to show the time dimension for each series/economy
                            If 'auto' then the function will guess based on other parameters

        params:             extra query parameters to pass to the API

        dimensions:         extra dimensions, database specific (e.g., version)

    Returns:
        a pandas DataFrame

    Examples:
        # 5 years of population data (with economy names)
        wbgapi.data.DataFrame('SP.POP.TOTL', time=range(2010,2020),labels=True)

        # Most recent poverty and income data for LAC
        wbgapi.data.DataFrame(['SI.POV.NAHC', 'NY.GDP.PCAP.CD'], economy=wb.region.members('LAC'),mrnev=1,timeColumns=True)

        # Fetch most recent CO2 emissions for each country and merge its income group
        wbgapi.data.DataFrame('EN.ATM.CO2E.PC',mrnev=1).join(wbgapi.economy.DataFrame()['incomeLevel'])

        # Top 10 emitters per capita
        wbgapi.data.DataFrame('EN.ATM.CO2E.PC',mrnev=1,labels=True).sort_values('EN.ATM.CO2E.PC',ascending=False).head(10)

    Notes:
        timeColumns currently defaults to False so that the default column composition is consistent. This may change to 'auto'
        at some point, so that mrv behavior is more intuitive for data discovery
    '''
    def frame(index):
        # build an empty frame with either a simple or a MultiIndex, named after the requested dimensions
        if len(index) > 1:
            i = [[]] * len(index)
            return pd.DataFrame(index=pd.MultiIndex(levels=i, codes=i, names=tuple(index)))

        df = pd.DataFrame()
        df.index.name = index[0]
        return df

    def is_single(x):
        # True if the parameter value resolves to exactly one element of its dimension
        if type(x) is str:
            if x == 'all':
                return False
            elif x == 'mrv':
                return True

        # not necessary to pass db since we don't actually care about the parameters just the count of them
        return len(w.queryParam(x).split(';')) == 1

    if pd is None:
        raise ModuleNotFoundError('you must install pandas to use this feature')

    # set up the axes by looking at the index/column parameters
    concepts = ['economy','series','time']
    for k,v in w.source.concepts(db).items():
        if k not in concepts:
            concepts.insert(0, k)

    if type(index) is str:
        index = [index]

    if index is None or columns is None:
        # we need to infer at least one dimension
        dimensions_ = {'series': series, 'economy': economy, 'time': time}
        dimensions_.update(dimensions)

        axes = concepts.copy()

        # now we reduce axes by eliminating any dimension consisting of
        # one element not defined in the calling parameters, with a stop
        # if we reduce to 2 dimensions
        x = concepts.copy()
        x.reverse()
        for k in x:
            if len(axes) == 2:
                break

            if k == columns or (type(index) is list and k in index):
                continue

            values = dimensions_.get(k, 'all')
            if k == 'time' and (mrv == 1 or mrnev == 1 or is_single(values)):
                axes.remove(k)
                if timeColumns == 'auto' and (mrv == 1 or mrnev == 1):
                    timeColumns = True
            elif is_single(values):
                axes.remove(k)

        if columns is None and index is None:
            columns = axes.pop(-1)
            index = axes
        elif columns is None:
            # try to guess a column based on what index doesn't define
            x = list(filter(lambda x: x not in index, axes))
            if len(x) > 0:
                columns = x[-1]
            # bug fix: this previously read `(set(concepts) - set(list)) > 0`, which
            # passed the builtin `list` type to set() (TypeError) and compared a set
            # to an int; the intent per the comment below is to test whether any
            # concept remains unclaimed by index
            elif len(set(concepts) - set(index)) > 0:
                # index has claimed all non-singular dimensions, so set columns from the full concepts list
                x = list(filter(lambda x: x not in index, concepts))
                columns = x[-1]
            else:
                # index is the same as the concepts list. That's not allowed
                raise ValueError('one dimension must be a column')
        elif index is None:
            axes.remove(columns)
            index = axes

    # sanity checks
    if type(columns) is not str or columns not in concepts:
        raise ValueError('columns must be None or a dimension')

    if type(index) is not list or len(set(index) - set(concepts)) > 0:
        raise ValueError('index must be None or a dimension list')

    if columns in index:
        raise ValueError('columns cannot be an element in index')

    if columns == 'time' or 'time' in index or timeColumns == 'auto':
        timeColumns = False

    # for now let's see if it works to build the dataframe dynamically
    df = frame(index)
    # empty series - never assigned actual values; dtype=object avoids the
    # pandas >= 1.0 DeprecationWarning for dtype-less empty Series construction
    dummy = pd.Series(dtype=object)
    ts_suffix = ':T'

    concepts = w.source.concepts(db)

    if labels:
        # create a separate dataframe for labels so that we can control the column position below
        df2 = frame(index)

    for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, skipAggs=skipAggs, numericTimeKeys=numericTimeKeys, params=params, db=db, **dimensions):
        column_key = row[columns]['id']
        if len(index) == 1:
            index_key = row[index[0]]['id']
        else:
            index_key = tuple(map(lambda x: row[x]['id'], index))

        # this logic only assigns values to locations that don't yet exist. First observations thus take precedent over subsequent ones
        if pd.isna(df.get(column_key, dummy).get(index_key)):
            df.loc[index_key, column_key] = np.nan if row['value'] is None else row['value']
            if timeColumns:
                df.loc[index_key, column_key + ts_suffix] = row['time']['value']

            if labels:
                for i in index:
                    df2.loc[index_key, concepts[i]['value']] = row[i]['value']

    df.sort_index(axis=0,inplace=True)
    df.sort_index(axis=1,inplace=True)
    if labels:
        return df2.join(df)
        # return pd.concat([df2,df], axis=1, sort=False)

    return df
def get(series, economy, time='all', mrv=None, mrnev=None, labels=False, numericTimeKeys=False, db=None, **dimensions):
    '''Return a single data observation from the current database

    Arguments:
        series:             a series identifier

        economy:            an economy identifier

        time:               a time identifier. Both element keys and values are acceptable

        mrv:                return only the specified number of most recent values (same time period for all economies)

        mrnev:              return only the specified number of non-empty most recent values (time period varies)

        labels:             include both dimension id and name (e.g., ZWE & Zimbabwe, not just ZWE)

        numericTimeKeys:    store the time object by value (e.g., 2014) instead of key ('YR2014') if value is numeric

        dimensions:         extra dimensions, database specific (e.g., version)

    Returns:
        a data observation, or None if the request matched nothing

    Notes:
        This is a convenience wrapper that returns the first row produced by fetch().
        For predictable results, set mrv or mrnev to 1, or request a single time value.

    Example:
        # print the last population estimate for France
        print(wbgapi.data.get('SP.POP.TOTL', 'FRA', mrnev=1)['value'])
    '''
    # ask fetch() for at most one page entry and hand back the first row, if any
    rows = fetch(series, economy, time, mrv=mrv, mrnev=mrnev, labels=labels, numericTimeKeys=numericTimeKeys, params={'per_page': 1}, db=db, **dimensions)
    return next(iter(rows), None)
def footnote(series, economy, time, db=None):
    '''Return the footnote for a single data point, if any

    Arguments:
        series:             a series identifier

        economy:            an economy identifier

        time:               a time identifier. Both element keys and values are acceptable

        db:                 database to query; defaults to the module-level current database

    Returns:
        footnote text, or None if no footnote exists (or the metadata request fails)

    Example:
        print(wbgapi.data.footnote('SP.POP.TOTL', 'FRA', 2015))
    '''
    if db is None:
        db = w.db

    # note that this only supports singular footnote references at this point, although the interface suggests otherwise
    url = 'sources/{source}/footnote/{economy}~{series}~{time}/metadata'
    try:
        for row in w.metadata(url, ['series'], source=db, series=series, economy=economy, time=w.queryParam(time, 'time', db=db)):
            return row.metadata['FootNote']
    except Exception:
        # bug fix: this was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; catching Exception keeps the intended "no footnote
        # available" behavior without hiding interpreter-level signals
        pass

    return None
| 39.380615 | 221 | 0.593228 |
import wbgapi as w
try:
import numpy as np
import pandas as pd
except ImportError:
np = None
pd = None
def fetch(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, params={}, db=None, **dimensions):
if db is None:
db = w.db
concepts = w.source.concepts(db)
concept_keys = {v['key']: k for k,v in concepts.items()}
params_ = {}
params_.update(params)
if mrv:
params_['mrv'] = mrv
elif mrnev:
params_['mrnev'] = mrnev
dimensions_ = {'series': series, 'economy': economy, 'time': time}
dimensions_.update(dimensions)
url = 'sources/{}'.format(db)
keys = ['series', 'economy', 'time']
values = {}
for k,v in dimensions_.items():
if k not in concepts:
raise KeyError('{} is not a concept in database {}'.format(k, db))
if k not in keys:
keys.append(k)
url += '/{}/{}'.format(concepts[k]['key'], '{' + k + '}')
values[k] = w.queryParam(v, concept=k, db=db)
aggs = w.economy.aggregates()
for row in w.refetch(url, keys, params=params_, **values):
if skipBlanks and row['value'] is None:
continue
skip = False
x = {'value': row['value']}
for elem in row['variable']:
key = concept_keys[elem['concept'].lower()]
if key == 'economy' and skipAggs and elem['id'] in aggs:
skip = True
break
if not skip:
if labels:
del(elem['concept'])
x[key] = elem
if key == 'economy':
x[key]['aggregate'] = elem['id'] in aggs
elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
x[key]['id'] = int(elem['value'])
else:
x[key] = elem['id']
if key == 'economy':
x['aggregate'] = elem['id'] in aggs
elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
x[key] = int(elem['value'])
if not skip:
yield x
def FlatFrame(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, params={}, db=None, **dimensions):
if pd is None:
raise ModuleNotFoundError('you must install pandas to use this feature')
key = 'value' if labels else 'id'
df = None
for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, numericTimeKeys=True, skipAggs=skipAggs, params=params, db=db, **dimensions):
if df is None:
columns = row.keys()
df = pd.DataFrame(columns=columns)
df.loc[len(df)] = [row[i][key] if type(row[i]) is dict else row[i] for i in columns]
return df
def DataFrame(series, economy='all', time='all', index=None, columns=None, mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, timeColumns=False, params={}, db=None, **dimensions):
def frame(index):
if len(index) > 1:
i = [[]] * len(index)
return pd.DataFrame(index=pd.MultiIndex(levels=i, codes=i, names=tuple(index)))
df = pd.DataFrame()
df.index.name = index[0]
return df
def is_single(x):
if type(x) is str:
if x == 'all':
return False
elif x == 'mrv':
return True
return len(w.queryParam(x).split(';')) == 1
if pd is None:
raise ModuleNotFoundError('you must install pandas to use this feature')
# set up the axes by looking at the index/column parameters
concepts = ['economy','series','time']
for k,v in w.source.concepts(db).items():
if k not in concepts:
concepts.insert(0, k)
if type(index) is str:
index = [index]
if index is None or columns is None:
# we need to infer at least one dimension
dimensions_ = {'series': series, 'economy': economy, 'time': time}
dimensions_.update(dimensions)
axes = concepts.copy()
# now we reduce axes by eliminating any dimension consisting of
# one element not defined in the calling parameters, with a stop
# if we reduce to 2 dimensions
x = concepts.copy()
x.reverse()
for k in x:
if len(axes) == 2:
break
if k == columns or (type(index) is list and k in index):
continue
values = dimensions_.get(k, 'all')
if k == 'time' and (mrv == 1 or mrnev == 1 or is_single(values)):
axes.remove(k)
if timeColumns == 'auto' and (mrv == 1 or mrnev == 1):
timeColumns = True
elif is_single(values):
axes.remove(k)
if columns is None and index is None:
columns = axes.pop(-1)
index = axes
elif columns is None:
# try to guess a column based on what index doesn't define
x = list(filter(lambda x: x not in index, axes))
if len(x) > 0:
columns = x[-1]
elif (set(concepts) - set(list)) > 0:
x = list(filter(lambda x: x not in index, concepts))
columns = x[-1]
else:
raise ValueError('one dimension must be a column')
elif index is None:
axes.remove(columns)
index = axes
# sanity checks
if type(columns) is not str or columns not in concepts:
raise ValueError('columns must be None or a dimension')
if type(index) is not list or len(set(index) - set(concepts)) > 0:
raise ValueError('index must be None or a dimension list')
if columns in index:
raise ValueError('columns cannot be an element in index')
if columns == 'time' or 'time' in index or timeColumns == 'auto':
timeColumns = False
# for now let's see if it works to build the dataframe dynamically
df = frame(index)
dummy = pd.Series()
ts_suffix = ':T'
concepts = w.source.concepts(db)
if labels:
df2 = frame(index)
for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, skipAggs=skipAggs, numericTimeKeys=numericTimeKeys, params=params, db=db, **dimensions):
column_key = row[columns]['id']
if len(index) == 1:
index_key = row[index[0]]['id']
else:
index_key = tuple(map(lambda x: row[x]['id'], index))
if pd.isna(df.get(column_key, dummy).get(index_key)):
df.loc[index_key, column_key] = np.nan if row['value'] is None else row['value']
if timeColumns:
df.loc[index_key, column_key + ts_suffix] = row['time']['value']
if labels:
for i in index:
df2.loc[index_key, concepts[i]['value']] = row[i]['value']
df.sort_index(axis=0,inplace=True)
df.sort_index(axis=1,inplace=True)
if labels:
return df2.join(df)
# return pd.concat([df2,df], axis=1, sort=False)
return df
def get(series, economy, time='all', mrv=None, mrnev=None, labels=False, numericTimeKeys=False, db=None, **dimensions):
for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, labels=labels, numericTimeKeys=numericTimeKeys, params={'per_page': 1}, db=db, **dimensions):
return row
def footnote(series, economy, time, db=None):
if db is None:
db = w.db
# note that this only supports singular footnote references at this point, although the interface suggests otherwise
url = 'sources/{source}/footnote/{economy}~{series}~{time}/metadata'
try:
for row in w.metadata(url, ['series'], source=db, series=series, economy=economy, time=w.queryParam(time, 'time', db=db)):
return row.metadata['FootNote']
except:
pass # will return None then
| true | true |
f72fe57794917edbcfc8d26818116b24e336b4d8 | 787 | py | Python | examples/tf/trpo_gym_tf_cartpole.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | examples/tf/trpo_gym_tf_cartpole.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | examples/tf/trpo_gym_tf_cartpole.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | import gym
from garage.baselines import LinearFeatureBaseline
from garage.experiment import run_experiment
from garage.tf.algos import TRPO
from garage.tf.envs import TfEnv
from garage.tf.policies import CategoricalMLPPolicy
# Need to wrap in a tf environment and force_reset to true
# see https://github.com/openai/rllab/issues/87#issuecomment-282519288
env = TfEnv(gym.make("CartPole-v0"))
policy = CategoricalMLPPolicy(
name="policy", env_spec=env.spec, hidden_sizes=(32, 32))
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = TRPO(
env=env,
policy=policy,
baseline=baseline,
batch_size=4000,
max_path_length=200,
n_itr=120,
discount=0.99,
max_kl_step=0.01,
)
run_experiment(algo.train(), n_parallel=1, snapshot_mode="last", seed=1)
| 26.233333 | 72 | 0.758577 | import gym
from garage.baselines import LinearFeatureBaseline
from garage.experiment import run_experiment
from garage.tf.algos import TRPO
from garage.tf.envs import TfEnv
from garage.tf.policies import CategoricalMLPPolicy
CartPole-v0"))
policy = CategoricalMLPPolicy(
name="policy", env_spec=env.spec, hidden_sizes=(32, 32))
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = TRPO(
env=env,
policy=policy,
baseline=baseline,
batch_size=4000,
max_path_length=200,
n_itr=120,
discount=0.99,
max_kl_step=0.01,
)
run_experiment(algo.train(), n_parallel=1, snapshot_mode="last", seed=1)
| true | true |
f72fe5795879771746bcc6ee4b44c101ac8e4453 | 267 | py | Python | CCF/CSP/2018/18121.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | 1 | 2019-05-04T10:28:32.000Z | 2019-05-04T10:28:32.000Z | CCF/CSP/2018/18121.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | null | null | null | CCF/CSP/2018/18121.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | 3 | 2020-12-31T04:36:38.000Z | 2021-07-25T07:39:31.000Z | r, y, g = map(int, input().split())
n = int(input())
ans = 0
for _ in range(n):
k, t = map(int, input().split())
if k == 0:
ans += t
elif k == 1:
ans += t
elif k == 2:
ans = ans + t + r
elif k == 3:
pass
print(ans)
| 17.8 | 36 | 0.419476 | r, y, g = map(int, input().split())
n = int(input())
ans = 0
for _ in range(n):
k, t = map(int, input().split())
if k == 0:
ans += t
elif k == 1:
ans += t
elif k == 2:
ans = ans + t + r
elif k == 3:
pass
print(ans)
| true | true |
f72fe6c802fc9b6df210c17f9eaf4d123167398f | 1,996 | py | Python | examples/Model_HM_RWS.py | kpoeppel/pytorch_probgraph | b78595ab03bbe92595ad2f6b35f5dd8bf84d6da0 | [
"BSD-3-Clause"
] | 47 | 2020-08-10T02:04:26.000Z | 2022-03-23T22:20:56.000Z | examples/Model_HM_RWS.py | kpoeppel/pytorch_probgraph | b78595ab03bbe92595ad2f6b35f5dd8bf84d6da0 | [
"BSD-3-Clause"
] | null | null | null | examples/Model_HM_RWS.py | kpoeppel/pytorch_probgraph | b78595ab03bbe92595ad2f6b35f5dd8bf84d6da0 | [
"BSD-3-Clause"
] | 4 | 2020-08-10T15:32:06.000Z | 2021-12-29T15:04:20.000Z |
import site
site.addsitedir('..')
import torch
from pytorch_probgraph import BernoulliLayer
from pytorch_probgraph import InteractionLinear
from pytorch_probgraph import HelmholtzMachine
from itertools import chain
from tqdm import tqdm
class Model_HM_RWS(torch.nn.Module):
def __init__(self):
super().__init__()
layer0 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 1, 28, 28]), requires_grad=True))
layer1 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
layer2 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
interactionUp1 = InteractionLinear(layer0.bias.shape[1:], layer1.bias.shape[1:])
interactionDown1 = InteractionLinear(layer1.bias.shape[1:], layer0.bias.shape[1:])
interactionUp2 = InteractionLinear(layer1.bias.shape[1:], layer2.bias.shape[1:])
interactionDown2 = InteractionLinear(layer2.bias.shape[1:], layer1.bias.shape[1:])
parameters = chain(*[m.parameters() for m in [layer0, layer1, layer2, interactionUp1, interactionUp2, interactionDown1, interactionDown2]])
opt = torch.optim.Adam(parameters)
self.model = HelmholtzMachine([layer0, layer1, layer2],
[interactionUp1, interactionUp2],
[interactionDown1, interactionDown2],
optimizer=opt)
#print(interaction.weight.shape)
def train(self, data, epochs=1, device=None):
for epoch in range(epochs):
for dat in data:
self.model.trainReweightedWS(dat.to(device), ksamples=5)
if isinstance(data, tqdm):
data = tqdm(data)
#print(torch.sum(self.model.interaction.weight))
def loglikelihood(self, data):
return self.model.loglikelihood(data, ksamples=100).cpu().detach()
def generate(self, N=1):
return self.model.sampleAll(N=N)[0][0].cpu()
| 43.391304 | 147 | 0.655311 |
import site
site.addsitedir('..')
import torch
from pytorch_probgraph import BernoulliLayer
from pytorch_probgraph import InteractionLinear
from pytorch_probgraph import HelmholtzMachine
from itertools import chain
from tqdm import tqdm
class Model_HM_RWS(torch.nn.Module):
def __init__(self):
super().__init__()
layer0 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 1, 28, 28]), requires_grad=True))
layer1 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
layer2 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
interactionUp1 = InteractionLinear(layer0.bias.shape[1:], layer1.bias.shape[1:])
interactionDown1 = InteractionLinear(layer1.bias.shape[1:], layer0.bias.shape[1:])
interactionUp2 = InteractionLinear(layer1.bias.shape[1:], layer2.bias.shape[1:])
interactionDown2 = InteractionLinear(layer2.bias.shape[1:], layer1.bias.shape[1:])
parameters = chain(*[m.parameters() for m in [layer0, layer1, layer2, interactionUp1, interactionUp2, interactionDown1, interactionDown2]])
opt = torch.optim.Adam(parameters)
self.model = HelmholtzMachine([layer0, layer1, layer2],
[interactionUp1, interactionUp2],
[interactionDown1, interactionDown2],
optimizer=opt)
def train(self, data, epochs=1, device=None):
for epoch in range(epochs):
for dat in data:
self.model.trainReweightedWS(dat.to(device), ksamples=5)
if isinstance(data, tqdm):
data = tqdm(data)
def loglikelihood(self, data):
return self.model.loglikelihood(data, ksamples=100).cpu().detach()
def generate(self, N=1):
return self.model.sampleAll(N=N)[0][0].cpu()
| true | true |
f72fe7511018a20cd842050a050f2a2e4c49353b | 6,242 | py | Python | jesse/models/utils.py | farukuzun/jesse | c4c0c3dbab034db853fc1b09ac0f2697592bed79 | [
"MIT"
] | 1 | 2021-07-04T10:18:28.000Z | 2021-07-04T10:18:28.000Z | jesse/models/utils.py | farukuzun/jesse | c4c0c3dbab034db853fc1b09ac0f2697592bed79 | [
"MIT"
] | null | null | null | jesse/models/utils.py | farukuzun/jesse | c4c0c3dbab034db853fc1b09ac0f2697592bed79 | [
"MIT"
] | null | null | null | import threading
import numpy as np
import jesse.helpers as jh
from jesse.models.Candle import Candle
from jesse.models.CompletedTrade import CompletedTrade
from jesse.models.DailyBalance import DailyBalance
from jesse.models.Order import Order
from jesse.models.Orderbook import Orderbook
from jesse.models.Ticker import Ticker
from jesse.models.Trade import Trade
from jesse.services import logger
def store_candle_into_db(exchange: str, symbol: str, candle: np.ndarray) -> None:
d = {
'id': jh.generate_unique_id(),
'symbol': symbol,
'exchange': exchange,
'timestamp': candle[0],
'open': candle[1],
'high': candle[3],
'low': candle[4],
'close': candle[2],
'volume': candle[5]
}
def async_save() -> None:
Candle.insert(**d).on_conflict_ignore().execute()
print(
jh.color(
f"candle: {jh.timestamp_to_time(d['timestamp'])}-{exchange}-{symbol}: {candle}",
'blue'
)
)
# async call
threading.Thread(target=async_save).start()
def store_ticker_into_db(exchange: str, symbol: str, ticker: np.ndarray) -> None:
return
d = {
'id': jh.generate_unique_id(),
'timestamp': ticker[0],
'last_price': ticker[1],
'high_price': ticker[2],
'low_price': ticker[3],
'volume': ticker[4],
'symbol': symbol,
'exchange': exchange,
}
def async_save() -> None:
Ticker.insert(**d).on_conflict_ignore().execute()
print(
jh.color(f'ticker: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {ticker}', 'yellow')
)
# async call
threading.Thread(target=async_save).start()
def store_completed_trade_into_db(completed_trade: CompletedTrade) -> None:
return
d = {
'id': completed_trade.id,
'strategy_name': completed_trade.strategy_name,
'symbol': completed_trade.symbol,
'exchange': completed_trade.exchange,
'type': completed_trade.type,
'timeframe': completed_trade.timeframe,
'entry_price': completed_trade.entry_price,
'exit_price': completed_trade.exit_price,
'take_profit_at': completed_trade.take_profit_at,
'stop_loss_at': completed_trade.stop_loss_at,
'qty': completed_trade.qty,
'opened_at': completed_trade.opened_at,
'closed_at': completed_trade.closed_at,
'entry_candle_timestamp': completed_trade.entry_candle_timestamp,
'exit_candle_timestamp': completed_trade.exit_candle_timestamp,
'leverage': completed_trade.leverage,
}
def async_save() -> None:
CompletedTrade.insert(**d).execute()
if jh.is_debugging():
logger.info(f'Stored the completed trade record for {completed_trade.exchange}-{completed_trade.symbol}-{completed_trade.strategy_name} into database.')
# async call
threading.Thread(target=async_save).start()
def store_order_into_db(order: Order) -> None:
return
d = {
'id': order.id,
'trade_id': order.trade_id,
'exchange_id': order.exchange_id,
'vars': order.vars,
'symbol': order.symbol,
'exchange': order.exchange,
'side': order.side,
'type': order.type,
'flag': order.flag,
'qty': order.qty,
'price': order.price,
'status': order.status,
'created_at': order.created_at,
'executed_at': order.executed_at,
'canceled_at': order.canceled_at,
'role': order.role,
}
def async_save() -> None:
Order.insert(**d).execute()
if jh.is_debugging():
logger.info(f'Stored the executed order record for {order.exchange}-{order.symbol} into database.')
# async call
threading.Thread(target=async_save).start()
def store_daily_balance_into_db(daily_balance: dict) -> None:
return
def async_save():
DailyBalance.insert(**daily_balance).execute()
if jh.is_debugging():
logger.info(f'Stored daily portfolio balance record into the database: {daily_balance["asset"]} => {jh.format_currency(round(daily_balance["balance"], 2))}'
)
# async call
threading.Thread(target=async_save).start()
def store_trade_into_db(exchange: str, symbol: str, trade: np.ndarray) -> None:
return
d = {
'id': jh.generate_unique_id(),
'timestamp': trade[0],
'price': trade[1],
'buy_qty': trade[2],
'sell_qty': trade[3],
'buy_count': trade[4],
'sell_count': trade[5],
'symbol': symbol,
'exchange': exchange,
}
def async_save() -> None:
Trade.insert(**d).on_conflict_ignore().execute()
print(
jh.color(
f'trade: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {trade}',
'green'
)
)
# async call
threading.Thread(target=async_save).start()
def store_orderbook_into_db(exchange: str, symbol: str, orderbook: np.ndarray) -> None:
return
d = {
'id': jh.generate_unique_id(),
'timestamp': jh.now_to_timestamp(),
'data': orderbook.dumps(),
'symbol': symbol,
'exchange': exchange,
}
def async_save() -> None:
Orderbook.insert(**d).on_conflict_ignore().execute()
print(
jh.color(
f'orderbook: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: [{orderbook[0][0][0]}, {orderbook[0][0][1]}], [{orderbook[1][0][0]}, {orderbook[1][0][1]}]',
'magenta'
)
)
# async call
threading.Thread(target=async_save).start()
def fetch_candles_from_db(exchange: str, symbol: str, start_date: int, finish_date: int) -> tuple:
candles_tuple = tuple(
Candle.select(
Candle.timestamp, Candle.open, Candle.close, Candle.high, Candle.low,
Candle.volume
).where(
Candle.timestamp.between(start_date, finish_date),
Candle.exchange == exchange,
Candle.symbol == symbol
).order_by(Candle.timestamp.asc()).tuples()
)
return candles_tuple
| 31.21 | 181 | 0.60942 | import threading
import numpy as np
import jesse.helpers as jh
from jesse.models.Candle import Candle
from jesse.models.CompletedTrade import CompletedTrade
from jesse.models.DailyBalance import DailyBalance
from jesse.models.Order import Order
from jesse.models.Orderbook import Orderbook
from jesse.models.Ticker import Ticker
from jesse.models.Trade import Trade
from jesse.services import logger
def store_candle_into_db(exchange: str, symbol: str, candle: np.ndarray) -> None:
d = {
'id': jh.generate_unique_id(),
'symbol': symbol,
'exchange': exchange,
'timestamp': candle[0],
'open': candle[1],
'high': candle[3],
'low': candle[4],
'close': candle[2],
'volume': candle[5]
}
def async_save() -> None:
Candle.insert(**d).on_conflict_ignore().execute()
print(
jh.color(
f"candle: {jh.timestamp_to_time(d['timestamp'])}-{exchange}-{symbol}: {candle}",
'blue'
)
)
threading.Thread(target=async_save).start()
def store_ticker_into_db(exchange: str, symbol: str, ticker: np.ndarray) -> None:
return
d = {
'id': jh.generate_unique_id(),
'timestamp': ticker[0],
'last_price': ticker[1],
'high_price': ticker[2],
'low_price': ticker[3],
'volume': ticker[4],
'symbol': symbol,
'exchange': exchange,
}
def async_save() -> None:
Ticker.insert(**d).on_conflict_ignore().execute()
print(
jh.color(f'ticker: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {ticker}', 'yellow')
)
threading.Thread(target=async_save).start()
def store_completed_trade_into_db(completed_trade: CompletedTrade) -> None:
return
d = {
'id': completed_trade.id,
'strategy_name': completed_trade.strategy_name,
'symbol': completed_trade.symbol,
'exchange': completed_trade.exchange,
'type': completed_trade.type,
'timeframe': completed_trade.timeframe,
'entry_price': completed_trade.entry_price,
'exit_price': completed_trade.exit_price,
'take_profit_at': completed_trade.take_profit_at,
'stop_loss_at': completed_trade.stop_loss_at,
'qty': completed_trade.qty,
'opened_at': completed_trade.opened_at,
'closed_at': completed_trade.closed_at,
'entry_candle_timestamp': completed_trade.entry_candle_timestamp,
'exit_candle_timestamp': completed_trade.exit_candle_timestamp,
'leverage': completed_trade.leverage,
}
def async_save() -> None:
CompletedTrade.insert(**d).execute()
if jh.is_debugging():
logger.info(f'Stored the completed trade record for {completed_trade.exchange}-{completed_trade.symbol}-{completed_trade.strategy_name} into database.')
threading.Thread(target=async_save).start()
def store_order_into_db(order: Order) -> None:
return
d = {
'id': order.id,
'trade_id': order.trade_id,
'exchange_id': order.exchange_id,
'vars': order.vars,
'symbol': order.symbol,
'exchange': order.exchange,
'side': order.side,
'type': order.type,
'flag': order.flag,
'qty': order.qty,
'price': order.price,
'status': order.status,
'created_at': order.created_at,
'executed_at': order.executed_at,
'canceled_at': order.canceled_at,
'role': order.role,
}
def async_save() -> None:
Order.insert(**d).execute()
if jh.is_debugging():
logger.info(f'Stored the executed order record for {order.exchange}-{order.symbol} into database.')
threading.Thread(target=async_save).start()
def store_daily_balance_into_db(daily_balance: dict) -> None:
    """Persist a daily portfolio-balance snapshot on a background thread.

    NOTE(review): the bare `return` below disables the function entirely —
    the nested saver is never defined or started. Presumably switched off
    on purpose; confirm before removing the early return.

    `daily_balance` is assumed to contain at least the "asset" and
    "balance" keys used in the log message below — TODO confirm schema
    against the caller.
    """
    return
    # --- unreachable while the early `return` above is present ---
    def async_save():
        DailyBalance.insert(**daily_balance).execute()
        if jh.is_debugging():
            logger.info(f'Stored daily portfolio balance record into the database: {daily_balance["asset"]} => {jh.format_currency(round(daily_balance["balance"], 2))}'
            )
    # fire-and-forget: the thread is never joined, failures surface only in logs
    threading.Thread(target=async_save).start()
def store_trade_into_db(exchange: str, symbol: str, trade: np.ndarray) -> None:
    """Persist a single aggregated trade row on a background thread.

    NOTE(review): the bare `return` below disables the function entirely —
    everything after it is unreachable. Presumably switched off on purpose;
    confirm before deleting the early return.

    The indexing below implies `trade` is a 1-D array laid out as
    [timestamp, price, buy_qty, sell_qty, buy_count, sell_count] —
    TODO confirm against the producer.
    """
    return
    # --- unreachable while the early `return` above is present ---
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': trade[0],
        'price': trade[1],
        'buy_qty': trade[2],
        'sell_qty': trade[3],
        'buy_count': trade[4],
        'sell_count': trade[5],
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        # on_conflict_ignore(): duplicate rows are silently skipped
        Trade.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'trade: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {trade}',
                'green'
            )
        )
    # fire-and-forget: the thread is never joined, failures surface only in logs
    threading.Thread(target=async_save).start()
def store_orderbook_into_db(exchange: str, symbol: str, orderbook: np.ndarray) -> None:
    """Persist an orderbook snapshot (pickled via ndarray.dumps) on a background thread.

    NOTE(review): the bare `return` below disables the function entirely —
    everything after it is unreachable. Presumably switched off on purpose;
    confirm before deleting the early return.

    The `orderbook[side][level][0/1]` indexing below implies a 3-D array of
    [ask/bid][price-level][price, qty] — TODO confirm against the producer.
    """
    return
    # --- unreachable while the early `return` above is present ---
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': jh.now_to_timestamp(),
        'data': orderbook.dumps(),
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        # on_conflict_ignore(): duplicate rows are silently skipped
        Orderbook.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'orderbook: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: [{orderbook[0][0][0]}, {orderbook[0][0][1]}], [{orderbook[1][0][0]}, {orderbook[1][0][1]}]',
                'magenta'
            )
        )
    # fire-and-forget: the thread is never joined, failures surface only in logs
    threading.Thread(target=async_save).start()
def fetch_candles_from_db(exchange: str, symbol: str, start_date: int, finish_date: int) -> tuple:
    """Load candles for one exchange/symbol pair from the database.

    Returns a tuple of (timestamp, open, close, high, low, volume) rows,
    ordered oldest-first, covering [start_date, finish_date] inclusive.
    """
    query = (
        Candle.select(
            Candle.timestamp, Candle.open, Candle.close, Candle.high, Candle.low,
            Candle.volume
        )
        .where(
            Candle.timestamp.between(start_date, finish_date),
            Candle.exchange == exchange,
            Candle.symbol == symbol,
        )
        .order_by(Candle.timestamp.asc())
    )
    return tuple(query.tuples())
| true | true |
f72fe7bf8580c7c8f15d68a00c11795a0b14058e | 23,210 | py | Python | vyper/semantics/validation/local.py | Doc-Pixel/vyper | 4da1090d5ed9c339fdd402e987db760d7d63c088 | [
"Apache-2.0"
] | null | null | null | vyper/semantics/validation/local.py | Doc-Pixel/vyper | 4da1090d5ed9c339fdd402e987db760d7d63c088 | [
"Apache-2.0"
] | null | null | null | vyper/semantics/validation/local.py | Doc-Pixel/vyper | 4da1090d5ed9c339fdd402e987db760d7d63c088 | [
"Apache-2.0"
] | null | null | null | import copy
from typing import Optional
from vyper import ast as vy_ast
from vyper.ast.validation import validate_call_args
from vyper.exceptions import (
ExceptionList,
FunctionDeclarationException,
ImmutableViolation,
InvalidLiteral,
InvalidOperation,
InvalidType,
IteratorException,
NonPayableViolation,
StateAccessViolation,
StructureException,
TypeMismatch,
VariableDeclarationException,
VyperException,
)
# TODO consolidate some of these imports
from vyper.semantics.environment import CONSTANT_ENVIRONMENT_VARS, MUTABLE_ENVIRONMENT_VARS
from vyper.semantics.namespace import get_namespace
from vyper.semantics.types.abstract import IntegerAbstractType
from vyper.semantics.types.bases import DataLocation
from vyper.semantics.types.function import (
ContractFunction,
MemberFunctionDefinition,
StateMutability,
)
from vyper.semantics.types.indexable.mapping import MappingDefinition
from vyper.semantics.types.indexable.sequence import (
ArrayDefinition,
DynamicArrayDefinition,
TupleDefinition,
)
from vyper.semantics.types.user.event import Event
from vyper.semantics.types.utils import get_type_from_annotation
from vyper.semantics.types.value.address import AddressDefinition
from vyper.semantics.types.value.array_value import StringDefinition
from vyper.semantics.types.value.boolean import BoolDefinition
from vyper.semantics.validation.annotation import StatementAnnotationVisitor
from vyper.semantics.validation.base import VyperNodeVisitorBase
from vyper.semantics.validation.utils import (
get_common_types,
get_exact_type_from_node,
get_possible_types_from_node,
validate_expected_type,
)
def validate_functions(vy_module: vy_ast.Module) -> None:
    """Validate the function-level namespace of every FunctionDef in the module.

    All functions are checked even if an earlier one fails; the collected
    errors are raised together at the end.
    """
    errors = ExceptionList()
    namespace = get_namespace()
    for fn_node in vy_module.get_children(vy_ast.FunctionDef):
        # each function body is analyzed inside its own namespace scope
        with namespace.enter_scope():
            try:
                FunctionNodeVisitor(vy_module, fn_node, namespace)
            except VyperException as exc:
                errors.append(exc)
    errors.raise_if_not_empty()
def _is_terminus_node(node: vy_ast.VyperNode) -> bool:
    """Return True if this statement unconditionally ends execution.

    A node terminates when it carries the `_is_terminus` flag itself, or
    when it is a bare call expression whose callee type carries the flag.
    """
    if getattr(node, "_is_terminus", None):
        return True
    if not isinstance(node, vy_ast.Expr):
        return False
    if not isinstance(node.value, vy_ast.Call):
        return False
    callee = get_exact_type_from_node(node.value.func)
    return bool(getattr(callee, "_is_terminus", None))
def check_for_terminus(node_list: list) -> bool:
    """Return True if this statement list always terminates.

    Either one of its statements is itself a terminus, or some if/else in
    the list has both branches guaranteed to terminate.
    """
    if any(_is_terminus_node(stmt) for stmt in node_list):
        return True
    # an if statement covers the list only when BOTH branches terminate;
    # the else branch is tested first, matching the original short-circuit order
    if_stmts = [stmt for stmt in node_list if isinstance(stmt, vy_ast.If)]
    for if_stmt in reversed(if_stmts):
        if if_stmt.orelse and check_for_terminus(if_stmt.orelse) and check_for_terminus(if_stmt.body):
            return True
    return False
def _check_iterator_modification(
    target_node: vy_ast.VyperNode, search_node: vy_ast.VyperNode
) -> Optional[vy_ast.VyperNode]:
    """Search `search_node` for a statement that modifies `target_node`.

    Used to guard for-loop iterators: returns the first node matching
    `target_node` that is either the target of an (aug)assignment, or has
    a mutating array method (`append`/`pop`/`extend`) called on it.
    Returns None if no modification is found.
    """
    # every descendant of the same node type that structurally equals the target
    similar_nodes = [
        n
        for n in search_node.get_descendants(type(target_node))
        if vy_ast.compare_nodes(target_node, n)
    ]
    for node in similar_nodes:
        # raise if the node is the target of an assignment statement
        assign_node = node.get_ancestor((vy_ast.Assign, vy_ast.AugAssign))
        # note the use of get_descendants() blocks statements like
        # self.my_array[i] = x
        if assign_node and node in assign_node.target.get_descendants(include_self=True):
            return node
        attr_node = node.get_ancestor(vy_ast.Attribute)
        # note the use of get_descendants() blocks statements like
        # self.my_array[i].append(x)
        if (
            attr_node is not None
            and node in attr_node.value.get_descendants(include_self=True)
            and attr_node.attr in ("append", "pop", "extend")
        ):
            return node
    return None
def _validate_revert_reason(msg_node: vy_ast.VyperNode) -> None:
    """Validate a revert reason: a non-empty string literal, the special
    UNREACHABLE sentinel, or any value that fits within String[1024]."""
    if not msg_node:
        return
    if isinstance(msg_node, vy_ast.Str):
        # literal reasons must not be blank
        if not msg_node.value.strip():
            raise StructureException("Reason string cannot be empty", msg_node)
        return
    if isinstance(msg_node, vy_ast.Name) and msg_node.id == "UNREACHABLE":
        # sentinel compiled to an INVALID opcode; skip type checking
        return
    try:
        validate_expected_type(msg_node, StringDefinition(1024))
    except TypeMismatch as e:
        raise InvalidType("revert reason must fit within String[1024]") from e
def _validate_address_code_attribute(node: vy_ast.Attribute) -> None:
    """Restrict `(address).code` usage to `slice(<address>.code, start, <int literal>)`."""
    value_type = get_exact_type_from_node(node.value)
    if node.attr != "code" or not isinstance(value_type, AddressDefinition):
        return
    parent = node.get_ancestor()
    if isinstance(parent, vy_ast.Call):
        is_slice_call = isinstance(parent.func, vy_ast.Name) and parent.func.id == "slice"
        has_literal_length = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
        if is_slice_call and has_literal_length:
            return
    raise StructureException(
        "(address).code is only allowed inside of a slice function with a constant length",
        node,
    )
def _validate_msg_data_attribute(node: vy_ast.Attribute) -> None:
    """Restrict `msg.data` usage to `len(msg.data)` and
    `slice(msg.data, start, <int literal>)`."""
    is_msg_data = (
        isinstance(node.value, vy_ast.Name) and node.value.id == "msg" and node.attr == "data"
    )
    if not is_msg_data:
        return
    parent = node.get_ancestor()
    if not isinstance(parent, vy_ast.Call) or parent.get("func.id") not in ("slice", "len"):
        raise StructureException(
            "msg.data is only allowed inside of the slice or len functions",
            node,
        )
    if parent.get("func.id") == "slice":
        # the slice length must be a compile-time integer literal
        has_literal_length = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
        if not has_literal_length:
            raise StructureException(
                "slice(msg.data) must use a compile-time constant for length argument",
                parent,
            )
class FunctionNodeVisitor(VyperNodeVisitorBase):
    """Type checks and validates the body of a single FunctionDef.

    All work happens in `__init__`: each statement is visited and then
    annotated via `StatementAnnotationVisitor`; expression-level rules are
    applied by `_LocalExpressionVisitor`. Raises a VyperException subclass
    on the first violation found.
    """
    # statement types that need no validation of their own
    ignored_types = (
        vy_ast.Break,
        vy_ast.Constant,
        vy_ast.Pass,
    )
    scope_name = "function"
    def __init__(
        self, vyper_module: vy_ast.Module, fn_node: vy_ast.FunctionDef, namespace: dict
    ) -> None:
        """Run full validation of `fn_node`'s body within `namespace`."""
        self.vyper_module = vyper_module
        self.fn_node = fn_node
        self.namespace = namespace
        # ContractFunction type object attached during module-level analysis
        self.func = fn_node._metadata["type"]
        self.annotation_visitor = StatementAnnotationVisitor(fn_node, namespace)
        self.expr_visitor = _LocalExpressionVisitor()
        # make the function's arguments visible inside the body scope
        namespace.update(self.func.arguments)
        for node in fn_node.body:
            self.visit(node)
        if self.func.return_type:
            # a function with a return type must terminate on every code path
            if not check_for_terminus(fn_node.body):
                raise FunctionDeclarationException(
                    f"Missing or unmatched return statements in function '{fn_node.name}'",
                    fn_node,
                )
        if self.func.mutability == StateMutability.PURE:
            # pure functions may not read environment variables (block, msg, ...)
            node_list = fn_node.get_descendants(
                vy_ast.Attribute,
                {
                    "value.id": set(CONSTANT_ENVIRONMENT_VARS.keys()).union(
                        set(MUTABLE_ENVIRONMENT_VARS.keys())
                    )
                },
            )
            for node in node_list:
                t = node._metadata.get("type")
                if isinstance(t, ContractFunction) and t.mutability == StateMutability.PURE:
                    # allowed
                    continue
                raise StateAccessViolation(
                    "not allowed to query contract or environment variables in pure functions",
                    node_list[0],
                )
        if self.func.mutability is not StateMutability.PAYABLE:
            # msg.value only makes sense when ether can be attached to the call
            node_list = fn_node.get_descendants(
                vy_ast.Attribute, {"value.id": "msg", "attr": "value"}
            )
            if node_list:
                raise NonPayableViolation(
                    "msg.value is not allowed in non-payable functions", node_list[0]
                )
    def visit(self, node):
        """Validate the statement, then annotate it with type information."""
        super().visit(node)
        self.annotation_visitor.visit(node)
    def visit_AnnAssign(self, node):
        """Declare and type check a new memory variable (`x: type = value`)."""
        name = node.get("target.id")
        if name is None:
            raise VariableDeclarationException("Invalid assignment", node)
        if not node.value:
            raise VariableDeclarationException(
                "Memory variables must be declared with an initial value", node
            )
        type_definition = get_type_from_annotation(node.annotation, DataLocation.MEMORY)
        validate_expected_type(node.value, type_definition)
        try:
            self.namespace[name] = type_definition
        except VyperException as exc:
            # re-raise with this statement's source position attached
            raise exc.with_annotation(node) from None
        self.expr_visitor.visit(node.value)
    def visit_Assign(self, node):
        """Type check an assignment to an existing target."""
        if isinstance(node.value, vy_ast.Tuple):
            raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
        target = get_exact_type_from_node(node.target)
        if isinstance(target, MappingDefinition):
            raise StructureException(
                "Left-hand side of assignment cannot be a HashMap without a key", node
            )
        validate_expected_type(node.value, target)
        # rejects writes to constants/immutables and to storage from non-mutating fns
        target.validate_modification(node, self.func.mutability)
        self.expr_visitor.visit(node.value)
        self.expr_visitor.visit(node.target)
    def visit_AugAssign(self, node):
        """Type check an in-place assignment (`+=`, `-=`, ...)."""
        if isinstance(node.value, vy_ast.Tuple):
            raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
        target = get_exact_type_from_node(node.target)
        validate_expected_type(node.value, target)
        target.validate_modification(node, self.func.mutability)
        self.expr_visitor.visit(node.value)
    def visit_Raise(self, node):
        """Validate the optional revert reason of a `raise` statement."""
        if node.exc:
            _validate_revert_reason(node.exc)
            self.expr_visitor.visit(node.exc)
    def visit_Assert(self, node):
        """Validate the boolean test and optional reason of an `assert`."""
        if node.msg:
            _validate_revert_reason(node.msg)
            self.expr_visitor.visit(node.msg)
        try:
            validate_expected_type(node.test, BoolDefinition())
        except InvalidType:
            raise InvalidType("Assertion test value must be a boolean", node.test)
        self.expr_visitor.visit(node.test)
    def visit_Continue(self, node):
        """Ensure `continue` only appears inside a `for` loop."""
        for_node = node.get_ancestor(vy_ast.For)
        if for_node is None:
            raise StructureException("`continue` must be enclosed in a `for` loop", node)
    def visit_Return(self, node):
        """Check returned value(s) against the declared return type."""
        values = node.value
        if values is None:
            if self.func.return_type:
                raise FunctionDeclarationException("Return statement is missing a value", node)
            return
        elif self.func.return_type is None:
            raise FunctionDeclarationException("Function does not return any values", node)
        if isinstance(values, vy_ast.Tuple):
            values = values.elements
            if not isinstance(self.func.return_type, TupleDefinition):
                raise FunctionDeclarationException("Function only returns a single value", node)
            if self.func.return_type.length != len(values):
                raise FunctionDeclarationException(
                    f"Incorrect number of return values: "
                    f"expected {self.func.return_type.length}, got {len(values)}",
                    node,
                )
            # element-wise type check of the returned tuple
            for given, expected in zip(values, self.func.return_type.value_type):
                validate_expected_type(given, expected)
        else:
            validate_expected_type(values, self.func.return_type)
        self.expr_visitor.visit(node.value)
    def visit_If(self, node):
        """Type check the condition, then each branch inside its own scope."""
        validate_expected_type(node.test, BoolDefinition())
        self.expr_visitor.visit(node.test)
        with self.namespace.enter_scope():
            for n in node.body:
                self.visit(n)
        with self.namespace.enter_scope():
            for n in node.orelse:
                self.visit(n)
    def visit_For(self, node):
        """Validate a `for` loop: the iterable (range() or array), iterator
        immutability, and the body under every candidate iterator type."""
        if isinstance(node.iter, vy_ast.Subscript):
            raise StructureException("Cannot iterate over a nested list", node.iter)
        if isinstance(node.iter, vy_ast.Call):
            # iteration via range()
            if node.iter.get("func.id") != "range":
                raise IteratorException(
                    "Cannot iterate over the result of a function call", node.iter
                )
            validate_call_args(node.iter, (1, 2))
            args = node.iter.args
            if len(args) == 1:
                # range(CONSTANT)
                if not isinstance(args[0], vy_ast.Num):
                    raise StateAccessViolation("Value must be a literal", node)
                if args[0].value <= 0:
                    raise StructureException("For loop must have at least 1 iteration", args[0])
                validate_expected_type(args[0], IntegerAbstractType())
                type_list = get_possible_types_from_node(args[0])
            else:
                validate_expected_type(args[0], IntegerAbstractType())
                type_list = get_common_types(*args)
                if not isinstance(args[0], vy_ast.Constant):
                    # range(x, x + CONSTANT)
                    if not isinstance(args[1], vy_ast.BinOp) or not isinstance(
                        args[1].op, vy_ast.Add
                    ):
                        raise StructureException(
                            "Second element must be the first element plus a literal value",
                            args[0],
                        )
                    if not vy_ast.compare_nodes(args[0], args[1].left):
                        raise StructureException(
                            "First and second variable must be the same", args[1].left
                        )
                    if not isinstance(args[1].right, vy_ast.Int):
                        raise InvalidLiteral("Literal must be an integer", args[1].right)
                    if args[1].right.value < 1:
                        raise StructureException(
                            f"For loop has invalid number of iterations ({args[1].right.value}),"
                            " the value must be greater than zero",
                            args[1].right,
                        )
                else:
                    # range(CONSTANT, CONSTANT)
                    if not isinstance(args[1], vy_ast.Int):
                        raise InvalidType("Value must be a literal integer", args[1])
                    validate_expected_type(args[1], IntegerAbstractType())
                    if args[0].value >= args[1].value:
                        raise StructureException("Second value must be > first value", args[1])
        else:
            # iteration over a variable or literal list
            type_list = [
                i.value_type
                for i in get_possible_types_from_node(node.iter)
                if isinstance(i, (DynamicArrayDefinition, ArrayDefinition))
            ]
            if not type_list:
                raise InvalidType("Not an iterable type", node.iter)
        if isinstance(node.iter, (vy_ast.Name, vy_ast.Attribute)):
            # check for references to the iterated value within the body of the loop
            assign = _check_iterator_modification(node.iter, node)
            if assign:
                raise ImmutableViolation("Cannot modify array during iteration", assign)
            # Check if `iter` is a storage variable. get_descendants` is used to check for
            # nested `self` (e.g. structs)
            iter_is_storage_var = (
                isinstance(node.iter, vy_ast.Attribute)
                and len(node.iter.get_descendants(vy_ast.Name, {"id": "self"})) > 0
            )
            if iter_is_storage_var:
                # check if iterated value may be modified by function calls inside the loop
                iter_name = node.iter.attr
                for call_node in node.get_descendants(vy_ast.Call, {"func.value.id": "self"}):
                    fn_name = call_node.func.attr
                    fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": fn_name})[0]
                    if _check_iterator_modification(node.iter, fn_node):
                        # check for direct modification
                        raise ImmutableViolation(
                            f"Cannot call '{fn_name}' inside for loop, it potentially "
                            f"modifies iterated storage variable '{iter_name}'",
                            call_node,
                        )
                    for name in self.namespace["self"].members[fn_name].recursive_calls:
                        # check for indirect modification
                        fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": name})[0]
                        if _check_iterator_modification(node.iter, fn_node):
                            raise ImmutableViolation(
                                f"Cannot call '{fn_name}' inside for loop, it may call to '{name}' "
                                f"which potentially modifies iterated storage variable '{iter_name}'",
                                call_node,
                            )
        self.expr_visitor.visit(node.iter)
        for_loop_exceptions = []
        iter_name = node.target.id
        for type_ in type_list:
            # type check the for loop body using each possible type for iterator value
            type_ = copy.deepcopy(type_)
            # the iterator itself may not be reassigned within the loop body
            type_.is_constant = True
            with self.namespace.enter_scope():
                try:
                    self.namespace[iter_name] = type_
                except VyperException as exc:
                    raise exc.with_annotation(node) from None
                try:
                    for n in node.body:
                        self.visit(n)
                    # type information is applied directly because the scope is
                    # closed prior to the call to `StatementAnnotationVisitor`
                    node.target._metadata["type"] = type_
                    return
                except (TypeMismatch, InvalidOperation) as exc:
                    for_loop_exceptions.append(exc)
        if len(set(str(i) for i in for_loop_exceptions)) == 1:
            # if every attempt at type checking raised the same exception
            raise for_loop_exceptions[0]
        # return an aggregate TypeMismatch that shows all possible exceptions
        # depending on which type is used
        types_str = [str(i) for i in type_list]
        given_str = f"{', '.join(types_str[:1])} or {types_str[-1]}"
        raise TypeMismatch(
            f"Iterator value '{iter_name}' may be cast as {given_str}, "
            "but type checking fails with all possible types:",
            node,
            *(
                (f"Casting '{iter_name}' as {type_}: {exc.message}", exc.annotations[0])
                for type_, exc in zip(type_list, for_loop_exceptions)
            ),
        )
    def visit_Expr(self, node):
        """Validate a bare call statement (mutability and unused-return rules)."""
        if not isinstance(node.value, vy_ast.Call):
            raise StructureException("Expressions without assignment are disallowed", node)
        fn_type = get_exact_type_from_node(node.value.func)
        if isinstance(fn_type, Event):
            raise StructureException("To call an event you must use the `log` statement", node)
        if isinstance(fn_type, ContractFunction):
            if (
                fn_type.mutability > StateMutability.VIEW
                and self.func.mutability <= StateMutability.VIEW
            ):
                raise StateAccessViolation(
                    f"Cannot call a mutating function from a {self.func.mutability.value} function",
                    node,
                )
            if (
                self.func.mutability == StateMutability.PURE
                and fn_type.mutability != StateMutability.PURE
            ):
                raise StateAccessViolation(
                    "Cannot call non-pure function from a pure function", node
                )
        if isinstance(fn_type, MemberFunctionDefinition) and fn_type.is_modifying:
            # e.g. my_dyn_array.append(x) mutates the underlying array
            fn_type.underlying_type.validate_modification(node, self.func.mutability)
        # NOTE: fetch_call_return validates call args.
        return_value = fn_type.fetch_call_return(node.value)
        if (
            return_value
            and not isinstance(fn_type, MemberFunctionDefinition)
            and not isinstance(fn_type, ContractFunction)
        ):
            raise StructureException(
                f"Function '{fn_type._id}' cannot be called without assigning the result", node
            )
        self.expr_visitor.visit(node.value)
    def visit_Log(self, node):
        """Validate a `log` statement: must call an event with matching args."""
        if not isinstance(node.value, vy_ast.Call):
            raise StructureException("Log must call an event", node)
        event = get_exact_type_from_node(node.value.func)
        if not isinstance(event, Event):
            raise StructureException("Value is not an event", node.value)
        event.fetch_call_return(node.value)
        self.expr_visitor.visit(node.value)
class _LocalExpressionVisitor(VyperNodeVisitorBase):
    """Depth-first walker over expression trees.

    Applies the `msg.data` and `(address).code` usage restrictions to every
    Attribute node encountered; all other visitors just recurse into children.
    """
    ignored_types = (vy_ast.Constant, vy_ast.Name)
    scope_name = "function"
    def visit_Attribute(self, node: vy_ast.Attribute) -> None:
        self.visit(node.value)
        _validate_msg_data_attribute(node)
        _validate_address_code_attribute(node)
    def visit_BinOp(self, node: vy_ast.BinOp) -> None:
        for operand in (node.left, node.right):
            self.visit(operand)
    def visit_BoolOp(self, node: vy_ast.BoolOp) -> None:
        for operand in node.values:  # type: ignore[attr-defined]
            self.visit(operand)
    def visit_Call(self, node: vy_ast.Call) -> None:
        self.visit(node.func)
        for positional in node.args:
            self.visit(positional)
        for kw in node.keywords:
            self.visit(kw.value)
    def visit_Compare(self, node: vy_ast.Compare) -> None:
        self.visit(node.left)  # type: ignore[attr-defined]
        self.visit(node.right)  # type: ignore[attr-defined]
    def visit_Dict(self, node: vy_ast.Dict) -> None:
        for key_node in node.keys:
            self.visit(key_node)
        for value_node in node.values:
            self.visit(value_node)
    def visit_Index(self, node: vy_ast.Index) -> None:
        self.visit(node.value)
    def visit_List(self, node: vy_ast.List) -> None:
        for item in node.elements:
            self.visit(item)
    def visit_Subscript(self, node: vy_ast.Subscript) -> None:
        self.visit(node.value)
        self.visit(node.slice)
    def visit_Tuple(self, node: vy_ast.Tuple) -> None:
        for item in node.elements:
            self.visit(item)
    def visit_UnaryOp(self, node: vy_ast.UnaryOp) -> None:
        self.visit(node.operand)  # type: ignore[attr-defined]
| 40.43554 | 100 | 0.609823 | import copy
from typing import Optional
from vyper import ast as vy_ast
from vyper.ast.validation import validate_call_args
from vyper.exceptions import (
ExceptionList,
FunctionDeclarationException,
ImmutableViolation,
InvalidLiteral,
InvalidOperation,
InvalidType,
IteratorException,
NonPayableViolation,
StateAccessViolation,
StructureException,
TypeMismatch,
VariableDeclarationException,
VyperException,
)
from vyper.semantics.environment import CONSTANT_ENVIRONMENT_VARS, MUTABLE_ENVIRONMENT_VARS
from vyper.semantics.namespace import get_namespace
from vyper.semantics.types.abstract import IntegerAbstractType
from vyper.semantics.types.bases import DataLocation
from vyper.semantics.types.function import (
ContractFunction,
MemberFunctionDefinition,
StateMutability,
)
from vyper.semantics.types.indexable.mapping import MappingDefinition
from vyper.semantics.types.indexable.sequence import (
ArrayDefinition,
DynamicArrayDefinition,
TupleDefinition,
)
from vyper.semantics.types.user.event import Event
from vyper.semantics.types.utils import get_type_from_annotation
from vyper.semantics.types.value.address import AddressDefinition
from vyper.semantics.types.value.array_value import StringDefinition
from vyper.semantics.types.value.boolean import BoolDefinition
from vyper.semantics.validation.annotation import StatementAnnotationVisitor
from vyper.semantics.validation.base import VyperNodeVisitorBase
from vyper.semantics.validation.utils import (
get_common_types,
get_exact_type_from_node,
get_possible_types_from_node,
validate_expected_type,
)
def validate_functions(vy_module: vy_ast.Module) -> None:
err_list = ExceptionList()
namespace = get_namespace()
for node in vy_module.get_children(vy_ast.FunctionDef):
with namespace.enter_scope():
try:
FunctionNodeVisitor(vy_module, node, namespace)
except VyperException as e:
err_list.append(e)
err_list.raise_if_not_empty()
def _is_terminus_node(node: vy_ast.VyperNode) -> bool:
if getattr(node, "_is_terminus", None):
return True
if isinstance(node, vy_ast.Expr) and isinstance(node.value, vy_ast.Call):
func = get_exact_type_from_node(node.value.func)
if getattr(func, "_is_terminus", None):
return True
return False
def check_for_terminus(node_list: list) -> bool:
if next((i for i in node_list if _is_terminus_node(i)), None):
return True
for node in [i for i in node_list if isinstance(i, vy_ast.If)][::-1]:
if not node.orelse or not check_for_terminus(node.orelse):
continue
if not check_for_terminus(node.body):
continue
return True
return False
def _check_iterator_modification(
target_node: vy_ast.VyperNode, search_node: vy_ast.VyperNode
) -> Optional[vy_ast.VyperNode]:
similar_nodes = [
n
for n in search_node.get_descendants(type(target_node))
if vy_ast.compare_nodes(target_node, n)
]
for node in similar_nodes:
assign_node = node.get_ancestor((vy_ast.Assign, vy_ast.AugAssign))
if assign_node and node in assign_node.target.get_descendants(include_self=True):
return node
attr_node = node.get_ancestor(vy_ast.Attribute)
if (
attr_node is not None
and node in attr_node.value.get_descendants(include_self=True)
and attr_node.attr in ("append", "pop", "extend")
):
return node
return None
def _validate_revert_reason(msg_node: vy_ast.VyperNode) -> None:
if msg_node:
if isinstance(msg_node, vy_ast.Str):
if not msg_node.value.strip():
raise StructureException("Reason string cannot be empty", msg_node)
elif not (isinstance(msg_node, vy_ast.Name) and msg_node.id == "UNREACHABLE"):
try:
validate_expected_type(msg_node, StringDefinition(1024))
except TypeMismatch as e:
raise InvalidType("revert reason must fit within String[1024]") from e
def _validate_address_code_attribute(node: vy_ast.Attribute) -> None:
value_type = get_exact_type_from_node(node.value)
if isinstance(value_type, AddressDefinition) and node.attr == "code":
parent = node.get_ancestor()
if isinstance(parent, vy_ast.Call):
ok_func = isinstance(parent.func, vy_ast.Name) and parent.func.id == "slice"
ok_args = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
if ok_func and ok_args:
return
raise StructureException(
"(address).code is only allowed inside of a slice function with a constant length",
node,
)
def _validate_msg_data_attribute(node: vy_ast.Attribute) -> None:
if isinstance(node.value, vy_ast.Name) and node.value.id == "msg" and node.attr == "data":
parent = node.get_ancestor()
if not isinstance(parent, vy_ast.Call) or parent.get("func.id") not in ("slice", "len"):
raise StructureException(
"msg.data is only allowed inside of the slice or len functions",
node,
)
if parent.get("func.id") == "slice":
ok_args = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
if not ok_args:
raise StructureException(
"slice(msg.data) must use a compile-time constant for length argument",
parent,
)
class FunctionNodeVisitor(VyperNodeVisitorBase):
ignored_types = (
vy_ast.Break,
vy_ast.Constant,
vy_ast.Pass,
)
scope_name = "function"
def __init__(
self, vyper_module: vy_ast.Module, fn_node: vy_ast.FunctionDef, namespace: dict
) -> None:
self.vyper_module = vyper_module
self.fn_node = fn_node
self.namespace = namespace
self.func = fn_node._metadata["type"]
self.annotation_visitor = StatementAnnotationVisitor(fn_node, namespace)
self.expr_visitor = _LocalExpressionVisitor()
namespace.update(self.func.arguments)
for node in fn_node.body:
self.visit(node)
if self.func.return_type:
if not check_for_terminus(fn_node.body):
raise FunctionDeclarationException(
f"Missing or unmatched return statements in function '{fn_node.name}'",
fn_node,
)
if self.func.mutability == StateMutability.PURE:
node_list = fn_node.get_descendants(
vy_ast.Attribute,
{
"value.id": set(CONSTANT_ENVIRONMENT_VARS.keys()).union(
set(MUTABLE_ENVIRONMENT_VARS.keys())
)
},
)
for node in node_list:
t = node._metadata.get("type")
if isinstance(t, ContractFunction) and t.mutability == StateMutability.PURE:
continue
raise StateAccessViolation(
"not allowed to query contract or environment variables in pure functions",
node_list[0],
)
if self.func.mutability is not StateMutability.PAYABLE:
node_list = fn_node.get_descendants(
vy_ast.Attribute, {"value.id": "msg", "attr": "value"}
)
if node_list:
raise NonPayableViolation(
"msg.value is not allowed in non-payable functions", node_list[0]
)
def visit(self, node):
super().visit(node)
self.annotation_visitor.visit(node)
def visit_AnnAssign(self, node):
name = node.get("target.id")
if name is None:
raise VariableDeclarationException("Invalid assignment", node)
if not node.value:
raise VariableDeclarationException(
"Memory variables must be declared with an initial value", node
)
type_definition = get_type_from_annotation(node.annotation, DataLocation.MEMORY)
validate_expected_type(node.value, type_definition)
try:
self.namespace[name] = type_definition
except VyperException as exc:
raise exc.with_annotation(node) from None
self.expr_visitor.visit(node.value)
def visit_Assign(self, node):
if isinstance(node.value, vy_ast.Tuple):
raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
target = get_exact_type_from_node(node.target)
if isinstance(target, MappingDefinition):
raise StructureException(
"Left-hand side of assignment cannot be a HashMap without a key", node
)
validate_expected_type(node.value, target)
target.validate_modification(node, self.func.mutability)
self.expr_visitor.visit(node.value)
self.expr_visitor.visit(node.target)
def visit_AugAssign(self, node):
if isinstance(node.value, vy_ast.Tuple):
raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
target = get_exact_type_from_node(node.target)
validate_expected_type(node.value, target)
target.validate_modification(node, self.func.mutability)
self.expr_visitor.visit(node.value)
def visit_Raise(self, node):
if node.exc:
_validate_revert_reason(node.exc)
self.expr_visitor.visit(node.exc)
def visit_Assert(self, node):
if node.msg:
_validate_revert_reason(node.msg)
self.expr_visitor.visit(node.msg)
try:
validate_expected_type(node.test, BoolDefinition())
except InvalidType:
raise InvalidType("Assertion test value must be a boolean", node.test)
self.expr_visitor.visit(node.test)
def visit_Continue(self, node):
for_node = node.get_ancestor(vy_ast.For)
if for_node is None:
raise StructureException("`continue` must be enclosed in a `for` loop", node)
def visit_Return(self, node):
values = node.value
if values is None:
if self.func.return_type:
raise FunctionDeclarationException("Return statement is missing a value", node)
return
elif self.func.return_type is None:
raise FunctionDeclarationException("Function does not return any values", node)
if isinstance(values, vy_ast.Tuple):
values = values.elements
if not isinstance(self.func.return_type, TupleDefinition):
raise FunctionDeclarationException("Function only returns a single value", node)
if self.func.return_type.length != len(values):
raise FunctionDeclarationException(
f"Incorrect number of return values: "
f"expected {self.func.return_type.length}, got {len(values)}",
node,
)
for given, expected in zip(values, self.func.return_type.value_type):
validate_expected_type(given, expected)
else:
validate_expected_type(values, self.func.return_type)
self.expr_visitor.visit(node.value)
def visit_If(self, node):
validate_expected_type(node.test, BoolDefinition())
self.expr_visitor.visit(node.test)
with self.namespace.enter_scope():
for n in node.body:
self.visit(n)
with self.namespace.enter_scope():
for n in node.orelse:
self.visit(n)
def visit_For(self, node):
if isinstance(node.iter, vy_ast.Subscript):
raise StructureException("Cannot iterate over a nested list", node.iter)
if isinstance(node.iter, vy_ast.Call):
if node.iter.get("func.id") != "range":
raise IteratorException(
"Cannot iterate over the result of a function call", node.iter
)
validate_call_args(node.iter, (1, 2))
args = node.iter.args
if len(args) == 1:
if not isinstance(args[0], vy_ast.Num):
raise StateAccessViolation("Value must be a literal", node)
if args[0].value <= 0:
raise StructureException("For loop must have at least 1 iteration", args[0])
validate_expected_type(args[0], IntegerAbstractType())
type_list = get_possible_types_from_node(args[0])
else:
validate_expected_type(args[0], IntegerAbstractType())
type_list = get_common_types(*args)
if not isinstance(args[0], vy_ast.Constant):
if not isinstance(args[1], vy_ast.BinOp) or not isinstance(
args[1].op, vy_ast.Add
):
raise StructureException(
"Second element must be the first element plus a literal value",
args[0],
)
if not vy_ast.compare_nodes(args[0], args[1].left):
raise StructureException(
"First and second variable must be the same", args[1].left
)
if not isinstance(args[1].right, vy_ast.Int):
raise InvalidLiteral("Literal must be an integer", args[1].right)
if args[1].right.value < 1:
raise StructureException(
f"For loop has invalid number of iterations ({args[1].right.value}),"
" the value must be greater than zero",
args[1].right,
)
else:
if not isinstance(args[1], vy_ast.Int):
raise InvalidType("Value must be a literal integer", args[1])
validate_expected_type(args[1], IntegerAbstractType())
if args[0].value >= args[1].value:
raise StructureException("Second value must be > first value", args[1])
else:
type_list = [
i.value_type
for i in get_possible_types_from_node(node.iter)
if isinstance(i, (DynamicArrayDefinition, ArrayDefinition))
]
if not type_list:
raise InvalidType("Not an iterable type", node.iter)
if isinstance(node.iter, (vy_ast.Name, vy_ast.Attribute)):
assign = _check_iterator_modification(node.iter, node)
if assign:
raise ImmutableViolation("Cannot modify array during iteration", assign)
iter_is_storage_var = (
isinstance(node.iter, vy_ast.Attribute)
and len(node.iter.get_descendants(vy_ast.Name, {"id": "self"})) > 0
)
if iter_is_storage_var:
iter_name = node.iter.attr
for call_node in node.get_descendants(vy_ast.Call, {"func.value.id": "self"}):
fn_name = call_node.func.attr
fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": fn_name})[0]
if _check_iterator_modification(node.iter, fn_node):
raise ImmutableViolation(
f"Cannot call '{fn_name}' inside for loop, it potentially "
f"modifies iterated storage variable '{iter_name}'",
call_node,
)
for name in self.namespace["self"].members[fn_name].recursive_calls:
fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": name})[0]
if _check_iterator_modification(node.iter, fn_node):
raise ImmutableViolation(
f"Cannot call '{fn_name}' inside for loop, it may call to '{name}' "
f"which potentially modifies iterated storage variable '{iter_name}'",
call_node,
)
self.expr_visitor.visit(node.iter)
for_loop_exceptions = []
iter_name = node.target.id
for type_ in type_list:
type_ = copy.deepcopy(type_)
type_.is_constant = True
with self.namespace.enter_scope():
try:
self.namespace[iter_name] = type_
except VyperException as exc:
raise exc.with_annotation(node) from None
try:
for n in node.body:
self.visit(n)
node.target._metadata["type"] = type_
return
except (TypeMismatch, InvalidOperation) as exc:
for_loop_exceptions.append(exc)
if len(set(str(i) for i in for_loop_exceptions)) == 1:
raise for_loop_exceptions[0]
types_str = [str(i) for i in type_list]
given_str = f"{', '.join(types_str[:1])} or {types_str[-1]}"
raise TypeMismatch(
f"Iterator value '{iter_name}' may be cast as {given_str}, "
"but type checking fails with all possible types:",
node,
*(
(f"Casting '{iter_name}' as {type_}: {exc.message}", exc.annotations[0])
for type_, exc in zip(type_list, for_loop_exceptions)
),
)
def visit_Expr(self, node):
if not isinstance(node.value, vy_ast.Call):
raise StructureException("Expressions without assignment are disallowed", node)
fn_type = get_exact_type_from_node(node.value.func)
if isinstance(fn_type, Event):
raise StructureException("To call an event you must use the `log` statement", node)
if isinstance(fn_type, ContractFunction):
if (
fn_type.mutability > StateMutability.VIEW
and self.func.mutability <= StateMutability.VIEW
):
raise StateAccessViolation(
f"Cannot call a mutating function from a {self.func.mutability.value} function",
node,
)
if (
self.func.mutability == StateMutability.PURE
and fn_type.mutability != StateMutability.PURE
):
raise StateAccessViolation(
"Cannot call non-pure function from a pure function", node
)
if isinstance(fn_type, MemberFunctionDefinition) and fn_type.is_modifying:
fn_type.underlying_type.validate_modification(node, self.func.mutability)
return_value = fn_type.fetch_call_return(node.value)
if (
return_value
and not isinstance(fn_type, MemberFunctionDefinition)
and not isinstance(fn_type, ContractFunction)
):
raise StructureException(
f"Function '{fn_type._id}' cannot be called without assigning the result", node
)
self.expr_visitor.visit(node.value)
def visit_Log(self, node):
if not isinstance(node.value, vy_ast.Call):
raise StructureException("Log must call an event", node)
event = get_exact_type_from_node(node.value.func)
if not isinstance(event, Event):
raise StructureException("Value is not an event", node.value)
event.fetch_call_return(node.value)
self.expr_visitor.visit(node.value)
class _LocalExpressionVisitor(VyperNodeVisitorBase):
    """Depth-first walker over expression subtrees inside a function body.

    Constants and bare names have no children to validate and are skipped.
    Attribute nodes are additionally passed to the msg.data and address.code
    validators as the tree is descended.
    """

    ignored_types = (vy_ast.Constant, vy_ast.Name)
    scope_name = "function"

    def visit_Attribute(self, node: vy_ast.Attribute) -> None:
        self.visit(node.value)
        _validate_msg_data_attribute(node)
        _validate_address_code_attribute(node)

    def visit_BinOp(self, node: vy_ast.BinOp) -> None:
        for operand in (node.left, node.right):
            self.visit(operand)

    def visit_BoolOp(self, node: vy_ast.BoolOp) -> None:
        for operand in node.values:
            self.visit(operand)

    def visit_Call(self, node: vy_ast.Call) -> None:
        self.visit(node.func)
        for positional in node.args:
            self.visit(positional)
        for keyword in node.keywords:
            self.visit(keyword.value)

    def visit_Compare(self, node: vy_ast.Compare) -> None:
        for operand in (node.left, node.right):
            self.visit(operand)

    def visit_Dict(self, node: vy_ast.Dict) -> None:
        # keys first, then values, matching source order of validation errors
        for child in (*node.keys, *node.values):
            self.visit(child)

    def visit_Index(self, node: vy_ast.Index) -> None:
        self.visit(node.value)

    def visit_List(self, node: vy_ast.List) -> None:
        for member in node.elements:
            self.visit(member)

    def visit_Subscript(self, node: vy_ast.Subscript) -> None:
        self.visit(node.value)
        self.visit(node.slice)

    def visit_Tuple(self, node: vy_ast.Tuple) -> None:
        for member in node.elements:
            self.visit(member)

    def visit_UnaryOp(self, node: vy_ast.UnaryOp) -> None:
        self.visit(node.operand)
| true | true |
f72fe8bd0b61ca670674b153c7c49dccbd99c3d8 | 3,581 | py | Python | google/bigtable/v2/bigtable-v2-py/noxfile.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/bigtable/v2/bigtable-v2-py/noxfile.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/bigtable/v2/bigtable-v2-py/noxfile.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pathlib
import shutil
import subprocess
import sys
import nox # type: ignore
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# NOTE(review): check_output returns the name with a trailing newline;
# downstream lower-bound-checker appears to tolerate it -- confirm.
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")

# Sessions run by default. A missing comma previously fused
# "check_lower_bounds" and "docs" into the single (nonexistent) session
# name "check_lower_boundsdocs", silently dropping both from the default run.
# NOTE(review): nox normally reads nox.options.sessions; confirm whether
# assigning nox.sessions has any effect in the nox version used here.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]
@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
    """Run the unit test suite."""
    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')

    # assemble the pytest invocation, then run it in one call
    pytest_args = [
        'py.test',
        '--quiet',
        '--cov=google/cloud/bigtable_v2/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs)),
    ]
    session.run(*pytest_args)
@nox.session(python='3.7')
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    # report first (failing under 100%), then erase the collected data
    for coverage_args in (("report", "--show-missing", "--fail-under=100"), ("erase",)):
        session.run("coverage", *coverage_args)
@nox.session(python=['3.6', '3.7'])
def mypy(session):
    """Run the type checker."""
    session.install('mypy', 'types-pkg_resources')
    session.install('.')
    # --explicit-package-bases lets mypy resolve the namespace-package layout
    mypy_args = ('--explicit-package-bases', 'google')
    session.run('mypy', *mypy_args)
@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py"""
    session.install('google-cloud-testutils')
    session.install('.')

    checker_args = (
        'lower-bound-checker',
        'update',
        '--package-name', PACKAGE_NAME,
        '--constraints-file', str(LOWER_BOUND_CONSTRAINTS_FILE),
    )
    session.run(*checker_args)
@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file"""
    session.install('google-cloud-testutils')
    session.install('.')

    checker_args = (
        'lower-bound-checker',
        'check',
        '--package-name', PACKAGE_NAME,
        '--constraints-file', str(LOWER_BOUND_CONSTRAINTS_FILE),
    )
    session.run(*checker_args)
@nox.session(python='3.6')
def docs(session):
    """Build the docs for this library."""
    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")

    build_dir = os.path.join("docs", "_build")
    shutil.rmtree(build_dir, ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b", "html",
        "-d", os.path.join(build_dir, "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join(build_dir, "html", ""),
    )
| 26.924812 | 96 | 0.62692 |
import os
import pathlib
import shutil
import subprocess
import sys
import nox
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
nox.sessions = [
"unit",
"cover",
"mypy",
"check_lower_bounds"
"docs",
]
@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
session.install('-e', '.')
session.run(
'py.test',
'--quiet',
'--cov=google/cloud/bigtable_v2/',
'--cov-config=.coveragerc',
'--cov-report=term',
'--cov-report=html',
os.path.join('tests', 'unit', ''.join(session.posargs))
)
@nox.session(python='3.7')
def cover(session):
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=100")
session.run("coverage", "erase")
@nox.session(python=['3.6', '3.7'])
def mypy(session):
session.install('mypy', 'types-pkg_resources')
session.install('.')
session.run(
'mypy',
'--explicit-package-bases',
'google',
)
@nox.session
def update_lower_bounds(session):
session.install('google-cloud-testutils')
session.install('.')
session.run(
'lower-bound-checker',
'update',
'--package-name',
PACKAGE_NAME,
'--constraints-file',
str(LOWER_BOUND_CONSTRAINTS_FILE),
)
@nox.session
def check_lower_bounds(session):
session.install('google-cloud-testutils')
session.install('.')
session.run(
'lower-bound-checker',
'check',
'--package-name',
PACKAGE_NAME,
'--constraints-file',
str(LOWER_BOUND_CONSTRAINTS_FILE),
)
@nox.session(python='3.6')
def docs(session):
session.install("-e", ".")
session.install("sphinx<3.0.0", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
"sphinx-build",
"-W",
"-T",
"-N",
"-b",
"html",
"-d",
os.path.join("docs", "_build", "doctrees", ""),
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
| true | true |
f72fe92c6ad18f95463733bd1c3b6b698ed22a61 | 26,454 | py | Python | Python/VRI/CLUS_VRI_ProcessByTSA_GROUPC.py | bcgov/clus | e0d4e49f031126ee40f36b338651b9fddc180f8a | [
"Apache-2.0"
] | 27 | 2018-07-26T23:05:54.000Z | 2022-03-15T22:55:46.000Z | Python/VRI/CLUS_VRI_ProcessByTSA_GROUPC.py | ElizabethKleynhans/clus | a02aef861712ab62bb5b5877208a138e0074e365 | [
"Apache-2.0"
] | 41 | 2018-04-25T19:31:29.000Z | 2022-03-28T17:08:36.000Z | Python/VRI/CLUS_VRI_ProcessByTSA_GROUPC.py | ElizabethKleynhans/clus | a02aef861712ab62bb5b5877208a138e0074e365 | [
"Apache-2.0"
] | 10 | 2018-04-25T17:25:10.000Z | 2022-02-16T21:53:23.000Z | #-------------------------------------------------------------------------------------------------------------------------------------------------------------------------
'''
Script for processing SPI Data for use in CLUS Caribou Project
Mike Fowler
Spatial Data Analyst
June 2018
'''
#-------------------------------------------------------------------------------------------------------------------------------------------------------------------------
#--Imports
import datetime
import gc
import getpass
import os
import shutil
import sys

import arcpy
import arcpy as gp
#--Globals
# NOTE(review): a module-level `global` statement is a no-op; these names are
# module globals regardless. Kept for documentation of shared state.
global tsa, connInstance, kennyloggins
#srcTSA = r"\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\tsa\tsa.gdb\data\tsa_study_area_test"
#srcTSA = r"\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\tsa\tsa.gdb\data\tsa_study_area"
# Input Timber Supply Area polygons (local scratch copy)
srcTSA = r'C:\Users\mwfowler\AppData\Local\Temp\tsa.gdb\data\tsa_study_area'
#srcTSA = r'C:\Users\mwfowler\AppData\Local\Temp\tsa.gdb\data\tsa_study_area_for_processing'
#srcTSA = "C:\Users\mwfowler\AppData\Local\Temp\tsa.gdb\data\tsa_study_area"
#srcVRI = r"C:\Users\mwfowler\AppData\Local\Temp\VRI_TFL.gdb\VEG_COMP_LYR_R1_POLY_with_TFL"
#srcVRI = r'\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\vri_tfl\vri_tfl.gdb\vri_tfl'
#srcVRI = r'\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\vri_tfl\vri_test.gdb\data\vri_test'
#srcVRI = r'\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\vri_tfl\VRI_TFL_GEOM.gdb\VRI_TFL_GEOM'
# Input VRI (Vegetation Resources Inventory) polygons with TFLs merged in
srcVRI = r'C:\Users\mwfowler\AppData\Local\Temp\VRI_TFL_GEOM.gdb\VRI_TFL_GEOM'
# Field names on srcTSA used to label outputs
fldTSANum = 'TSA_NUMBER'
fldTSANam = 'TSA_NUMBER_DESCRIPTION'
# Oracle instance used by CreateBCGWConn
connInstance = r'bcgw.bcgov/idwprod1.bcgov'
# SimplifyPolygon tolerance (metres) used in the gridded fallback
simplifyTol = 3
# Which PROCESS_GROUP of TSAs this run handles (see __main__ where-clause)
processGroup = 'C'
#wrk = os.environ['TEMP']
#wrk = r"\\spatialfiles2.bcgov\work\FOR\VIC\HTS\ANA\Workarea\mwfowler\CLUS\Data\SPI"
# Working directory for the output geodatabase and log files
wrk = r"C:\Users\mwfowler\AppData\Local\Temp"
#wrk = r'\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\vri_tfl'
#dirLogFile = r"\\spatialfiles2.bcgov\work\FOR\VIC\HTS\ANA\Workarea\mwfowler\CLUS\Scripts\Python\VRI\log"
dirLogFile = wrk
sLogPrefix = "CLUS_ProcessByTSA_Group{0}_".format(processGroup)
def CalcOIDColumn(fc, newOIDField='SOURCE_OBJECTID'):
    """Copy each feature's ObjectID into a persistent long field on fc.

    ObjectIDs can be renumbered by later geoprocessing, so the source OID is
    preserved in *newOIDField* (the field is added when missing).

    Bug fixes vs. original: the field-existence check inspected the global
    srcVRI instead of the fc argument, and several calls referenced the
    unbound name `arcpy` (the module is imported as `gp`).
    """
    if not newOIDField in [fld.name for fld in gp.ListFields(fc)]:
        WriteLog(kennyloggins, 'Adding new field {0} field to {1}....'.format(newOIDField, fc), True)
        gp.AddField_management(fc, newOIDField, "LONG", 9)
    OIDFld = gp.Describe(fc).OIDFieldName
    #--Cursor through the data and update the new OID field to the OID Value
    WriteLog(kennyloggins, 'Computing value of {0} to {1}....\n'.format(newOIDField, fc), True)
    with gp.da.UpdateCursor(fc, [OIDFld, newOIDField]) as cursor:
        for row in cursor:
            row[1] = row[0]
            cursor.updateRow(row)
    return
def CreateBCGWConn(dbUser, dbPass):
connBCGW = os.path.join(os.path.dirname(arcpy.env.scratchGDB), 'SPI_DataAnalysis.sde')
if os.path.isfile(connBCGW):
os.remove(connBCGW)
try:
arcpy.CreateDatabaseConnection_management(os.path.dirname(connBCGW), os.path.basename(connBCGW), 'ORACLE', connInstance, username=dbUser, password=dbPass)
except:
print 'Error Creating BCGW connection....'
connBCGW = None
return connBCGW
def CreateTempDB(wrk, sType='FILE', name='VRI_by_TSA'):
    """Return a scratch workspace under *wrk*, creating it when absent.

    sType='FILE'     -- file geodatabase with a 'Data' feature dataset in
                        BC Albers (EPSG:3005); returns the dataset path.
    sType='PERSONAL' -- personal geodatabase; returns the .mdb path.
    Any other sType falls through and returns None (original behavior).

    Bug fix vs. original: calls referenced the unbound name `arcpy`
    (the module is imported as `gp`).
    """
    if sType == 'FILE':
        tmpName = '{0}.gdb'.format(name)
        tmpWrk = os.path.join(wrk, tmpName)
        if not gp.Exists(tmpWrk):
            gp.CreateFileGDB_management(wrk, tmpName)
        if not gp.Exists(os.path.join(tmpWrk, "Data")):
            gp.CreateFeatureDataset_management(tmpWrk, "Data", gp.SpatialReference(3005))
        return os.path.join(tmpWrk, "Data")
    elif sType == 'PERSONAL':
        tmpName = '{0}.mdb'.format(name)
        tmpWrk = os.path.join(wrk, tmpName)
        if not gp.Exists(tmpWrk):
            gp.CreatePersonalGDB_management(wrk, tmpName)
        return tmpWrk
def DeleteExists(data):
    """Delete *data* when it exists; return True if a delete occurred.

    Bug fix vs. original: referenced the unbound name `arcpy`
    (the module is imported as `gp`).
    """
    if gp.Exists(data):
        gp.Delete_management(data)
        return True
    return False
def CreateLogFile(bMsg=False):
currLog = os.path.join(dirLogFile, sLogPrefix + datetime.datetime.now().strftime("%Y%m%d_%H%M%S.log"))
fLog = open(currLog, 'w')
lstLog = []
lstLog.append("------------------------------------------------------------------\n")
lstLog.append("Log file for VRI Process By TSA - Group {0} \n".format(processGroup))
lstLog.append("Date:{0} \n".format(datetime.datetime.now().strftime("%B %d, %Y - %H%M")))
lstLog.append("User:{}\n".format(getpass.getuser()))
lstLog.append("Script:{}\n".format(sys.argv[0]))
lstLog.append("Source VRI:{}\n".format(srcVRI))
lstLog.append("Source TSA:{}\n".format(srcTSA))
lstLog.append("Output Directory:{}\n".format(os.path.join(wrk, 'VRI_by_TSA.gdb')))
lstLog.append("\n")
lstLog.append("------------------------------------------------------------------\n")
sLog = ''.join(lstLog)
fLog.write(sLog)
if bMsg:
print sLog
#gp.AddMessage(sLog)
return fLog
def WriteLog(fLog, sMessage, bMsg=False):
ts = datetime.datetime.now().strftime("%B %d, %Y - %H%M")
sMsg = '{0} - {1}'.format(ts, sMessage)
fLog.write(sMsg)
if bMsg:
print sMsg
#gp.AddMessage(sMsg)
def CreateProcessMetadataTable(wrk):
    """Create (if missing) the PROCESS_METADATA tracking table in *wrk*.

    The table records, per TSA, the polygon counts before/after Eliminate and
    the vertex counts before/after Simplify. Returns the table's full path.

    Bug fix vs. original: calls referenced the unbound name `arcpy`
    (the module is imported as `gp`).
    """
    tab = 'PROCESS_METADATA'
    tabPath = os.path.join(wrk, tab)
    if not gp.Exists(tabPath):
        gp.CreateTable_management(wrk, tab)
        gp.AddField_management(tabPath, "TSA_NUMBER", "TEXT", 3)
        gp.AddField_management(tabPath, "TSA_NAME", "TEXT", 50)
        gp.AddField_management(tabPath, "POLY_COUNT_ORIG", "LONG", 9)
        gp.AddField_management(tabPath, "POLY_COUNT_ELIM", "LONG", 9)
        gp.AddField_management(tabPath, "SIMPLIFY_TOLERANCE", "SHORT", 2)
        gp.AddField_management(tabPath, "VERTICES_PRE_SIMPLIFY", "LONG", 12)
        gp.AddField_management(tabPath, "VERTICES_POST_SIMPLIFY", "LONG", 12)
        # precision 6, scale 6 for the reduction percentage
        gp.AddField_management(tabPath, "VERTICES_REDUCE_PCT", "FLOAT", 6, 6)
    return tabPath
def FieldExists(fc, fld):
    """Return True when *fc* contains a field named *fld* (case-insensitive).

    Bug fix vs. original: referenced the unbound name `arcpy`
    (the module is imported as `gp`).
    """
    target = fld.upper()
    return any(f.name.upper() == target for f in gp.ListFields(fc))
def GetAreaField(fc):
    """Return the name of fc's polygon-area field (GEOMETRY_AREA,
    FEATURE_AREA or SHAPE_AREA), or None when none is present."""
    areaNames = ('GEOMETRY_AREA', 'FEATURE_AREA', 'SHAPE_AREA')
    for candidate in gp.ListFields(fc):
        if candidate.name.upper() in areaNames:
            return candidate.name
def EliminatebyGrid(tsa, vri, outFC, fraction=2):
    """Fallback Eliminate: tile the TSA into a fraction x fraction grid,
    run Eliminate on each tile separately, then merge the tile outputs
    into outFC. Used when a single Eliminate over the whole TSA fails.

    NOTE(review): the module is imported as `gp` (`import arcpy as gp`);
    the bare `arcpy.` references below look unbound unless `import arcpy`
    is also present -- confirm before running standalone.
    """
    #--Need a temporary DB to assemble this stuff
    DeleteExists(os.path.join(os.environ['TEMP'], 'ElimTemp{0}.gdb'.format(processGroup)))
    elimDB = CreateTempDB(os.environ['TEMP'], name='ElimTemp{0}'.format(processGroup))
    #--Get Extents of Grids to create as fraction of TSA width, height
    lyrTSA = 'lyrTSA'
    gp.MakeFeatureLayer_management(tsa, lyrTSA)
    desc = gp.Describe(lyrTSA)  # NOTE(review): unused local
    ext = gp.Describe(lyrTSA).extent
    extW = ((ext.XMax - ext.XMin)/fraction) + 1 #--Add 1m to ensure we are not touching edge of grids
    extH = ((ext.YMax - ext.YMin)/fraction) + 1
    gridTemp = os.path.join(elimDB, 'Grid')
    idTemp = os.path.join(elimDB, 'VRI_ID')
    #WriteLog(kennyloggins, 'extW - {0}\n'.format(str(extW)), True)
    #WriteLog(kennyloggins, 'extH - {0}\n'.format(str(extH)), True)
    # Build the tile grid over the TSA, then tag each VRI polygon with its tile
    gp.GridIndexFeatures_cartography(gridTemp, tsa, "INTERSECTFEATURE", "NO_USEPAGEUNIT", polygon_width=extW, polygon_height=extH)
    gp.Identity_analysis(vri, gridTemp, idTemp, "ALL", 1)
    outElims = []
    with arcpy.da.SearchCursor(gridTemp,['SHAPE@', 'PageName']) as cursor:
        for row in cursor:
            try:
                pg = row[1]
                WriteLog(kennyloggins, '----Doing Sub-Eliminate on - {0}\n'.format(str(pg)), True)
                lyrIDTemp = 'lyrIDTemp'
                lyrGridTemp = 'lyrGridTemp'
                outGrid = os.path.join(elimDB, 'Temp_{0}_1Grid'.format(pg))
                outElim = os.path.join(elimDB, 'Temp_{0}_2Elim'.format(pg))
                # Copy this tile's polygons out, constrained to the tile extent
                arcpy.MakeFeatureLayer_management(idTemp, lyrIDTemp, "PageName = '{0}'".format(pg))
                arcpy.env.extent = arcpy.Describe(lyrIDTemp).extent
                arcpy.CopyFeatures_management(lyrIDTemp, outGrid)
                arcpy.Delete_management(lyrIDTemp)
                # Select slivers (<= 0.5 ha) and eliminate them, excluding the
                # grid lines themselves so tile seams are preserved for the merge
                arcpy.MakeFeatureLayer_management(outGrid, lyrGridTemp)
                arcpy.SelectLayerByAttribute_management(lyrGridTemp, "NEW_SELECTION", "({0}/10000) <= 0.5".format(GetAreaField(outGrid)))
                arcpy.Eliminate_management(lyrGridTemp, outElim, "LENGTH", ex_features=gridTemp)
                outElims.append(outElim)
                arcpy.Delete_management(lyrGridTemp)
                arcpy.Delete_management(outGrid)
                WriteLog(kennyloggins, '----Done Sub-Eliminate - {0}\n'.format(str(outElims)), True)
            except Exception, e:
                # best-effort: log and continue with the remaining tiles
                WriteLog(kennyloggins, '***Error in Grid by Fraction - {0}\n'.format(str(e)), True)
    WriteLog(kennyloggins, '----Merge the Output Sub-Eliminate grids\n', True)
    arcpy.Merge_management(inputs=outElims, output=outFC)
    WriteLog(kennyloggins, '----Outputs Merged - {0}\n'.format(str(outFC)), True)
    DeleteExists(os.path.join(os.environ['TEMP'], 'ElimTemp{0}.gdb'.format(processGroup)))
    return
def ProcessByTSA(outWrk, tsaWC=None):
    """For each TSA (optionally filtered by where-clause tsaWC): clip the VRI
    to the TSA, eliminate sliver polygons (<= 0.5 ha), simplify geometry, tag
    the result with TSA number/name, record stats in PROCESS_METADATA, and
    save the output as vri_tsa_<NN> in outWrk. Each processing step has a
    gridded/partitioned fallback when the whole-TSA operation fails.

    NOTE(review): the module is imported as `gp` (`import arcpy as gp`); the
    bare `arcpy.` references throughout look unbound unless `import arcpy`
    is also present -- confirm before running standalone.
    """
    lyrTSA = 'lyrTSA'
    #--Create Table in TSA Database to track Processing Metadata
    processMDTab = CreateProcessMetadataTable(os.path.dirname(outWrk))
    if tsaWC == None:
        arcpy.MakeFeatureLayer_management(srcTSA, lyrTSA)
    else:
        arcpy.MakeFeatureLayer_management(srcTSA, lyrTSA, tsaWC)
    with arcpy.da.SearchCursor(lyrTSA,['SHAPE@', fldTSANum, fldTSANam]) as cursor:
        for row in cursor:
            try:
                #--Get values from the TSA into Variables
                geom = row[0]
                tsa_num = row[1].zfill(2)
                tsa_nam = row[2]
                polyCountClip = 0
                polyCountElim = 0
                iInVerts = 0
                iOutVerts = 0
                reductionRatio = 0.00
                lyrVRI = 'lyrVRI'
                lyrTSA = 'lyrTSA'
                #--Set the Geoprocessing Extent to the current TSA
                arcpy.env.extent = geom.extent
                gp.MakeFeatureLayer_management(geom, lyrTSA)
                WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
                WriteLog(kennyloggins, '----Starting to Process TSA-{0}-{1}\n'.format(tsa_num, tsa_nam), True)
                WriteLog(kennyloggins, '----Creating the VRI Layer....\n', True)
                arcpy.MakeFeatureLayer_management(srcVRI, lyrVRI)
                #---------------------------------------------------------------------
                #--Prepare to select the VRI using the TSA Area. Speeds up the clip
                #---------------------------------------------------------------------
                #-Select the VRI Using the Current TSA
                arcpy.SelectLayerByLocation_management (lyrVRI, "INTERSECT", geom)
                #---------------------------------------------------------------------
                #--Prepare to do the Clip
                #---------------------------------------------------------------------
                clipTemp = os.path.join(outWrk, 'vri_tsa_{0}_01Clip'.format(tsa_num))
                currOutput = clipTemp
                lyrClipTemp = 'lyrClipTemp'
                WriteLog(kennyloggins, '----Start Clip TSA-{0}\n'.format(tsa_num), True)
                arcpy.Clip_analysis(lyrVRI, geom, clipTemp)
                polyCountClip = arcpy.GetCount_management(clipTemp)[0]
                WriteLog(kennyloggins, '----Clip product Polygon Count-{0}\n'.format(str(polyCountClip)), True)
                WriteLog(kennyloggins, '----End Clip TSA-{0}\n'.format(tsa_num), True)
                try:
                    #---------------------------------------------------------------------
                    #--Prepare to do the Eliminate
                    #---------------------------------------------------------------------
                    # Select sliver polygons (<= 0.5 ha) for elimination
                    arcpy.MakeFeatureLayer_management(clipTemp, lyrClipTemp)
                    arcpy.SelectLayerByAttribute_management(lyrClipTemp, "NEW_SELECTION", "({0}/10000) <= 0.5".format(GetAreaField(clipTemp)))
                    elimTemp = os.path.join(outWrk, 'vri_tsa_{0}_02Elim'.format(tsa_num))
                    WriteLog(kennyloggins, '----Start Eliminate TSA-{0}\n'.format(tsa_num), True)
                    try:
                        arcpy.Eliminate_management(lyrClipTemp, elimTemp, "LENGTH")
                        #raise Exception('Testing, raising just to simulate failure')
                        currOutput = elimTemp
                        WriteLog(kennyloggins, '----End Eliminate TSA-{0}\n'.format(tsa_num), True)
                    except Exception, e:
                        # Fallback: run the Eliminate tile-by-tile over a grid
                        WriteLog(kennyloggins, '----Eliminate Failed, will try to process by Grid TSA-{0}\n'.format(tsa_num), True)
                        WriteLog(kennyloggins, '----Error Message:\n {0}\n'.format(str(e)), True)
                        try:
                            EliminatebyGrid(geom, clipTemp, elimTemp, fraction=8)
                            currOutput = elimTemp
                            WriteLog(kennyloggins, '----End Eliminate TSA-{0}\n'.format(tsa_num), True)
                        except Exception, e:
                            WriteLog(kennyloggins, '----Eliminate by Grid Fraction Failed! TSA-{0}\n'.format(tsa_num), True)
                            WriteLog(kennyloggins, '----Error Message:\n {0}\n'.format(str(e)), True)
                            #--Unable to Simplify by Partition or Grid. Give up. Toss Exception, move on to next TSA
                            raise Exception('Eliminating using Grid Fractions Failed. I Give Up.')
                    polyCountElim = arcpy.GetCount_management(elimTemp)[0]
                    WriteLog(kennyloggins, '----Elim product Polygon Count-{0}\n'.format(str(polyCountElim)), True)
                    #---------------------------------------------------------------------
                    #--Prepare to do the Geometry Simplify
                    #---------------------------------------------------------------------
                    simpTemp = os.path.join(outWrk, 'vri_tsa_{0}_03Simp'.format(tsa_num))
                    simpTempPnt = os.path.join(outWrk, 'vri_tsa_{0}_03Simp_Pnt'.format(tsa_num))
                    WriteLog(kennyloggins, '----Start Simplify TSA-{0}\n'.format(tsa_num), True)
                    try:
                        arcpy.env.cartographicPartitions = None
                        arcpy.cartography.SimplifyPolygon(elimTemp, simpTemp, "POINT_REMOVE", 3, 5000, "RESOLVE_ERRORS", "NO_KEEP")
                        #raise Exception('Raising just for testing purposes')
                        WriteLog(kennyloggins, '----End Simplify TSA-{0}\n'.format(tsa_num), True)
                        currOutput = simpTemp
                    except Exception, e:
                        WriteLog(kennyloggins, '----Straight up Simplify Failed TSA-{0}\n'.format(tsa_num), True)
                        WriteLog(kennyloggins, '----Error Message: {0}\n'.format(str(e)), True)
                        #--If the Simplify fails we will create a grid at 20km square and then set this as the partition layer environment
                        # NOTE(review): log text says 20,000m but the grid below is built at 10,000m
                        try:
                            WriteLog(kennyloggins, '----Going to try to Simplify with Grid Features 20,000m TSA-{0}\n'.format(tsa_num), True)
                            lyrElimTemp = 'lyrElim'
                            partTemp = os.path.join(outWrk, 'vri_tsa_{0}_02Part'.format(tsa_num))
                            arcpy.MakeFeatureLayer_management(elimTemp, lyrElimTemp)
                            arcpy.GridIndexFeatures_cartography(partTemp, lyrElimTemp, "INTERSECTFEATURE", "NO_USEPAGEUNIT", polygon_width=10000, polygon_height= 10000)
                            arcpy.env.cartographicPartitions = partTemp
                            res = arcpy.cartography.SimplifyPolygon(elimTemp, simpTemp, "POINT_REMOVE", simplifyTol, 5000, "RESOLVE_ERRORS", "NO_KEEP")
                            DeleteExists(partTemp)
                            arcpy.env.cartographicPartitions = None
                            #-----------------------------------------------------------------------------------------
                            #Gather stats on the number of vertices removed
                            #-----------------------------------------------------------------------------------------
                            # Parse the tool's messages for input/output vertex counts
                            iInVerts = 0
                            iOutVerts = 0
                            for i in range(0, res.messageCount):
                                msg = res.getMessage(i).upper()
                                if msg.find('INPUT VERTEX COUNT', 0) >= 0:
                                    iInVerts = iInVerts + int(msg[19:len(msg)])
                                if msg.find('OUTPUT VERTEX COUNT', 0 ) >= 0:
                                    iOutVerts = iOutVerts + int(msg[20:len(msg)])
                            WriteLog(kennyloggins, '----Total Polys before Eliminate-{0}\n'.format(str(polyCountClip)), True)
                            WriteLog(kennyloggins, '----Total Polys after Eliminate-{0}\n'.format(str(polyCountElim)), True)
                            WriteLog(kennyloggins, '----Total In Vertices-{0}\n'.format(str(iInVerts)), True)
                            WriteLog(kennyloggins, '----Total Out Vertices-{0}\n'.format(str(iOutVerts)), True)
                            # NOTE(review): keepRatio is computed but never used
                            keepRatio = float(float(iOutVerts)/float(iInVerts))
                            reductionRatio = float(float(iInVerts - iOutVerts)/iInVerts)
                            WriteLog(kennyloggins, '----Reduction %-{0}\n'.format(str(reductionRatio)), True)
                            currOutput = simpTemp
                            WriteLog(kennyloggins, '----End Simplify TSA-{0}\n'.format(tsa_num), True)
                        except Exception, e:
                            WriteLog(kennyloggins, '----Simplify with Grid Features 10,000m Failed! TSA-{0}\n'.format(tsa_num), True)
                            WriteLog(kennyloggins, '----Error Message:\n {0}\n'.format(str(e)), True)
                            #--Unable to Simplify by Partition or Grid. Give up. Toss Exception, move on to next TSA
                            raise Exception('Cartographic Partitions and Grid Index Attempts on Simplify Failed. I Give Up.')
                    #---------------------------------------------------------------------
                    #--Add TSA Information Columns
                    #---------------------------------------------------------------------
                    arcpy.AddField_management(currOutput, "TSA_NUMBER", "TEXT", 3)
                    arcpy.AddField_management(currOutput, "TSA_NAME", "TEXT", 50)
                    with arcpy.da.UpdateCursor(currOutput, ["TSA_NUMBER", "TSA_NAME"]) as cursor:
                        for row in cursor:
                            row[0] = tsa_num
                            row[1] = tsa_nam
                            cursor.updateRow(row)
                    #---------------------------------------------------------------------
                    #--Update the Process Metadata
                    #---------------------------------------------------------------------
                    cursor = arcpy.da.InsertCursor(processMDTab,["TSA_NUMBER", "TSA_NAME", "POLY_COUNT_ORIG", "POLY_COUNT_ELIM", "VERTICES_PRE_SIMPLIFY", "VERTICES_POST_SIMPLIFY", "VERTICES_REDUCE_PCT", "SIMPLIFY_TOLERANCE"])
                    cursor.insertRow((tsa_num, tsa_nam, polyCountClip, polyCountElim, iInVerts, iOutVerts, reductionRatio, simplifyTol))
                    del cursor
                    #---------------------------------------------------------------------
                    #--Rename and Cleanup Temp Datasets
                    #---------------------------------------------------------------------
                    finData = os.path.join(outWrk, 'vri_tsa_{0}'.format(tsa_num))
                    DeleteExists(finData)
                    arcpy.Rename_management(currOutput, finData)
                    DeleteExists(simpTemp)
                    DeleteExists(simpTempPnt)
                    DeleteExists(clipTemp)
                    DeleteExists(elimTemp)
                    #--Clean up Layers to free up memory
                    lyrVRI = ''
                    lyrClipTemp = ''
                    lyrElimTemp = ''
                    lyrTSA = ''
                    DeleteExists(lyrVRI)
                    DeleteExists(lyrClipTemp)
                    DeleteExists(lyrTSA)
                    DeleteExists(lyrElimTemp)
                    del lyrVRI, lyrClipTemp, lyrElimTemp
                    gc.collect()
                    WriteLog(kennyloggins, '----Done Processing TSA-{0}\n'.format(tsa_num), True)
                    WriteLog(kennyloggins, '----Output Data is {1} TSA-{0}\n'.format(tsa_num, finData), True)
                    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
                except Exception, e:
                    # NOTE(review): this branch duplicates the success-path
                    # finalization above so that whatever stage completed
                    # (held in currOutput) is still tagged, logged and renamed.
                    WriteLog(kennyloggins, 'Error Message:\n {0}\n'.format(str(e)), True)
                    #---------------------------------------------------------------------
                    #--Add TSA Information Columns
                    #---------------------------------------------------------------------
                    arcpy.AddField_management(currOutput, "TSA_NUMBER", "TEXT", 3)
                    arcpy.AddField_management(currOutput, "TSA_NAME", "TEXT", 50)
                    with arcpy.da.UpdateCursor(currOutput, ["TSA_NUMBER", "TSA_NAME"]) as cursor:
                        for row in cursor:
                            row[0] = tsa_num
                            row[1] = tsa_nam
                            cursor.updateRow(row)
                    #---------------------------------------------------------------------
                    #--Update the Process Metadata
                    #---------------------------------------------------------------------
                    cursor = arcpy.da.InsertCursor(processMDTab,["TSA_NUMBER", "TSA_NAME", "POLY_COUNT_ORIG", "POLY_COUNT_ELIM", "VERTICES_PRE_SIMPLIFY", "VERTICES_POST_SIMPLIFY", "VERTICES_REDUCE_PCT", "SIMPLIFY_TOLERANCE"])
                    cursor.insertRow((tsa_num, tsa_nam, polyCountClip, polyCountElim, iInVerts, iOutVerts, reductionRatio, simplifyTol))
                    del cursor
                    #---------------------------------------------------------------------
                    #--Rename and Cleanup Temp Datasets
                    #---------------------------------------------------------------------
                    finData = os.path.join(outWrk, 'vri_tsa_{0}'.format(tsa_num))
                    DeleteExists(finData)
                    arcpy.Rename_management(currOutput, finData)
                    DeleteExists(simpTemp)
                    DeleteExists(simpTempPnt)
                    DeleteExists(clipTemp)
                    DeleteExists(elimTemp)
                    DeleteExists(partTemp)
                    #--Clean up Layers to free up memory
                    lyrVRI = ''
                    lyrClipTemp = ''
                    lyrElimTemp = ''
                    lyrTSA = ''
                    DeleteExists(lyrVRI)
                    DeleteExists(lyrClipTemp)
                    DeleteExists(lyrTSA)
                    DeleteExists(lyrElimTemp)
                    del lyrVRI, lyrClipTemp, lyrElimTemp
                    gc.collect()
                    WriteLog(kennyloggins, '----Done Processing TSA-{0}\n'.format(tsa_num), True)
                    WriteLog(kennyloggins, '----Output Data is {1} TSA-{0}\n'.format(tsa_num, finData), True)
                    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
            except Exception, e:
                # Per-TSA failure: log and continue with the next TSA
                WriteLog(kennyloggins, '*****Error Processing TSA-{0}\n'.format(tsa_num), True)
                WriteLog(kennyloggins, '*****Error Message:\n {0}\n'.format(str(e)), True)
                WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n', True)
    WriteLog(kennyloggins, '----Script Complete\n', True)
    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n', True)
#-----------------------------------------------
if __name__ == '__main__':
    #--Setup Environment parameters
    # NOTE(review): the module is imported as `gp`; these bare `arcpy.`
    # references look unbound unless `import arcpy` is also present -- confirm.
    arcpy.env.parallelProcessingFactor = "100%"
    arcpy.env.overwriteOutput = True
    arcpy.env.outputMFlag = "Disabled"
    #--Start a log file
    kennyloggins = CreateLogFile(True)
    #--Create the Output GDB
    #CalcOIDColumn(srcVRI)
    #ProcessByTSA(outWrk)
    outWrk = CreateTempDB(wrk, name='VRI_By_TSA_Group{0}'.format(processGroup))
    # Process every TSA in this group, excluding the 98/99 placeholders
    ProcessByTSA(outWrk, tsaWC="TSA_NUMBER NOT IN ('99', '98') AND PROCESS_GROUP = '{0}'".format(processGroup))
    #ProcessByTSA(outWrk, tsaWC="TSA_NUMBER IN ('99', '98')")
    #ProcessByTSA(outWrk, tsaExcl="'04', '26'")
    #GetAreaField(r'\\spatialfiles2.bcgov\archive\FOR\VIC\HTS\ANA\PROJECTS\CLUS\Data\vri_tfl\VRI_by_TSA.gdb\Data\vri_tsa_05_01Clip')
    #-----------------------------------------------------------
    #-Close the Log File
    #-----------------------------------------------------------
    kennyloggins.close()
| 61.37819 | 225 | 0.525251 |
'''
Script for processing SPI Data for use in CLUS Caribou Project
Mike Fowler
Spatial Data Analyst
June 2018
'''
import datetime
import sys
import os
import shutil
import getpass
import arcpy as gp
import gc
global tsa, connInstance, kennyloggins
srcTSA = r'C:\Users\mwfowler\AppData\Local\Temp\tsa.gdb\data\tsa_study_area'
srcVRI = r'C:\Users\mwfowler\AppData\Local\Temp\VRI_TFL_GEOM.gdb\VRI_TFL_GEOM'
fldTSANum = 'TSA_NUMBER'
fldTSANam = 'TSA_NUMBER_DESCRIPTION'
connInstance = r'bcgw.bcgov/idwprod1.bcgov'
simplifyTol = 3
processGroup = 'C'
wrk = r"C:\Users\mwfowler\AppData\Local\Temp"
dirLogFile = wrk
sLogPrefix = "CLUS_ProcessByTSA_Group{0}_".format(processGroup)
def CalcOIDColumn(fc, newOIDField='SOURCE_OBJECTID'):
if not newOIDField in [fld.name for fld in gp.ListFields(srcVRI)]:
WriteLog(kennyloggins, 'Adding new field {0} field to {1}....'.format(newOIDField, fc), True)
arcpy.AddField_management(fc, newOIDField, "LONG", 9)
OIDFld = arcpy.Describe(fc).OIDFieldName
WriteLog(kennyloggins, 'Computing value of {0} to {1}....\n'.format(newOIDField, fc), True)
with arcpy.da.UpdateCursor(fc, [OIDFld, newOIDField]) as cursor:
for row in cursor:
row[1] = row[0]
cursor.updateRow(row)
return
def CreateBCGWConn(dbUser, dbPass):
connBCGW = os.path.join(os.path.dirname(arcpy.env.scratchGDB), 'SPI_DataAnalysis.sde')
if os.path.isfile(connBCGW):
os.remove(connBCGW)
try:
arcpy.CreateDatabaseConnection_management(os.path.dirname(connBCGW), os.path.basename(connBCGW), 'ORACLE', connInstance, username=dbUser, password=dbPass)
except:
print 'Error Creating BCGW connection....'
connBCGW = None
return connBCGW
def CreateTempDB(wrk, sType='FILE', name='VRI_by_TSA'):
    """Ensure a scratch geodatabase named *name* exists under *wrk*.

    For ``sType='FILE'`` a file GDB with a "Data" feature dataset
    (BC Albers, EPSG:3005) is created if needed and the dataset path is
    returned.  For ``sType='PERSONAL'`` a personal GDB is created and its
    path returned.  Any other *sType* falls through and returns None.
    """
    if sType == 'FILE':
        gdbName = '{0}.gdb'.format(name)
        gdbPath = os.path.join(wrk, gdbName)
        if not arcpy.Exists(gdbPath):
            arcpy.CreateFileGDB_management(wrk, gdbName)
        dataDS = os.path.join(gdbPath, "Data")
        if not arcpy.Exists(dataDS):
            arcpy.CreateFeatureDataset_management(gdbPath, "Data", arcpy.SpatialReference(3005))
        return dataDS
    elif sType == 'PERSONAL':
        mdbName = '{0}.mdb'.format(name)
        mdbPath = os.path.join(wrk, mdbName)
        if not arcpy.Exists(mdbPath):
            arcpy.CreatePersonalGDB_management(wrk, mdbName)
        return mdbPath
def DeleteExists(data):
    """Delete *data* when it exists; return True if a delete happened."""
    if not arcpy.Exists(data):
        return False
    arcpy.Delete_management(data)
    return True
def CreateLogFile(bMsg=False):
    """Open a new timestamped run log and write its header.

    The file is created in ``dirLogFile`` using ``sLogPrefix`` plus a
    timestamp.  When *bMsg* is True the header is also echoed to stdout.
    Returns the OPEN file handle; the caller is responsible for closing it.
    """
    logPath = os.path.join(dirLogFile, sLogPrefix + datetime.datetime.now().strftime("%Y%m%d_%H%M%S.log"))
    logFile = open(logPath, 'w')
    # Header block describing this run's inputs and outputs.
    headerLines = [
        "------------------------------------------------------------------\n",
        "Log file for VRI Process By TSA - Group {0} \n".format(processGroup),
        "Date:{0} \n".format(datetime.datetime.now().strftime("%B %d, %Y - %H%M")),
        "User:{}\n".format(getpass.getuser()),
        "Script:{}\n".format(sys.argv[0]),
        "Source VRI:{}\n".format(srcVRI),
        "Source TSA:{}\n".format(srcTSA),
        "Output Directory:{}\n".format(os.path.join(wrk, 'VRI_by_TSA.gdb')),
        "\n",
        "------------------------------------------------------------------\n",
    ]
    sLog = ''.join(headerLines)
    logFile.write(sLog)
    if bMsg:
        print(sLog)
    return logFile
def WriteLog(fLog, sMessage, bMsg=False):
    """Prefix *sMessage* with a timestamp, append it to *fLog*, and
    optionally echo it to stdout when *bMsg* is True."""
    stamp = datetime.datetime.now().strftime("%B %d, %Y - %H%M")
    stamped = '{0} - {1}'.format(stamp, sMessage)
    fLog.write(stamped)
    if bMsg:
        print(stamped)
def CreateProcessMetadataTable(wrk):
    """Create (if absent) the PROCESS_METADATA summary table in *wrk* and
    return its full path.

    The table records per-TSA polygon counts and simplify statistics.
    """
    tab = 'PROCESS_METADATA'
    tabPath = os.path.join(wrk, tab)
    if not arcpy.Exists(tabPath):
        arcpy.CreateTable_management(wrk, tab)
        # (name, type, length/precision) for each summary column, in the
        # same order the original schema declared them.
        for fldName, fldType, fldLen in [
            ("TSA_NUMBER", "TEXT", 3),
            ("TSA_NAME", "TEXT", 50),
            ("POLY_COUNT_ORIG", "LONG", 9),
            ("POLY_COUNT_ELIM", "LONG", 9),
            ("SIMPLIFY_TOLERANCE", "SHORT", 2),
            ("VERTICES_PRE_SIMPLIFY", "LONG", 12),
            ("VERTICES_POST_SIMPLIFY", "LONG", 12),
        ]:
            arcpy.AddField_management(tabPath, fldName, fldType, fldLen)
        # FLOAT takes both precision and scale.
        arcpy.AddField_management(tabPath, "VERTICES_REDUCE_PCT", "FLOAT", 6, 6)
    return tabPath
def FieldExists(fc, fld):
    """Return True when feature class *fc* has a field named *fld*
    (comparison is case-insensitive)."""
    existingNames = [f.name.upper() for f in arcpy.ListFields(fc)]
    return fld.upper() in existingNames
def GetAreaField(fc):
    """Return the name of the first recognised geometry-area field on *fc*.

    Falls through (returning None) when none of the known area field names
    is present.
    """
    areaFieldNames = ('GEOMETRY_AREA', 'FEATURE_AREA', 'SHAPE_AREA')
    for fld in gp.ListFields(fc):
        if fld.name.upper() in areaFieldNames:
            return fld.name
def EliminatebyGrid(tsa, vri, outFC, fraction=2):
    """Fallback Eliminate: tile *tsa* into a grid and eliminate per tile.

    Splits the TSA extent into roughly ``fraction`` x ``fraction`` index
    tiles, runs Eliminate on the sliver polygons (<= 0.5 ha) of *vri*
    within each tile, then merges the per-tile results into *outFC*.
    Used by ProcessByTSA when a single Eliminate over the whole TSA fails.
    """
    # Start from a clean scratch GDB in %TEMP% for the intermediate tiles.
    DeleteExists(os.path.join(os.environ['TEMP'], 'ElimTemp{0}.gdb'.format(processGroup)))
    elimDB = CreateTempDB(os.environ['TEMP'], name='ElimTemp{0}'.format(processGroup))
    lyrTSA = 'lyrTSA'
    gp.MakeFeatureLayer_management(tsa, lyrTSA)
    # NOTE(review): ``desc`` is never used below.
    desc = gp.Describe(lyrTSA)
    ext = gp.Describe(lyrTSA).extent
    # Tile size: 1/fraction of the TSA extent (+1 to avoid edge gaps).
    extW = ((ext.XMax - ext.XMin)/fraction) + 1
    extH = ((ext.YMax - ext.YMin)/fraction) + 1
    gridTemp = os.path.join(elimDB, 'Grid')
    idTemp = os.path.join(elimDB, 'VRI_ID')
    gp.GridIndexFeatures_cartography(gridTemp, tsa, "INTERSECTFEATURE", "NO_USEPAGEUNIT", polygon_width=extW, polygon_height=extH)
    # Identity stamps each VRI polygon with the PageName of its grid tile.
    gp.Identity_analysis(vri, gridTemp, idTemp, "ALL", 1)
    outElims = []
    with arcpy.da.SearchCursor(gridTemp,['SHAPE@', 'PageName']) as cursor:
        for row in cursor:
            try:
                pg = row[1]
                WriteLog(kennyloggins, '----Doing Sub-Eliminate on - {0}\n'.format(str(pg)), True)
                lyrIDTemp = 'lyrIDTemp'
                lyrGridTemp = 'lyrGridTemp'
                outGrid = os.path.join(elimDB, 'Temp_{0}_1Grid'.format(pg))
                outElim = os.path.join(elimDB, 'Temp_{0}_2Elim'.format(pg))
                arcpy.MakeFeatureLayer_management(idTemp, lyrIDTemp, "PageName = '{0}'".format(pg))
                arcpy.env.extent = arcpy.Describe(lyrIDTemp).extent
                arcpy.CopyFeatures_management(lyrIDTemp, outGrid)
                arcpy.Delete_management(lyrIDTemp)
                arcpy.MakeFeatureLayer_management(outGrid, lyrGridTemp)
                # Select slivers: area/10000 (= hectares if units are metres) <= 0.5.
                arcpy.SelectLayerByAttribute_management(lyrGridTemp, "NEW_SELECTION", "({0}/10000) <= 0.5".format(GetAreaField(outGrid)))
                # ex_features preserves tile borders so tiles merge cleanly later.
                arcpy.Eliminate_management(lyrGridTemp, outElim, "LENGTH", ex_features=gridTemp)
                outElims.append(outElim)
                arcpy.Delete_management(lyrGridTemp)
                arcpy.Delete_management(outGrid)
                WriteLog(kennyloggins, '----Done Sub-Eliminate - {0}\n'.format(str(outElims)), True)
            except Exception, e:
                # Best-effort per tile: log the failure and continue.
                WriteLog(kennyloggins, '***Error in Grid by Fraction - {0}\n'.format(str(e)), True)
    WriteLog(kennyloggins, '----Merge the Output Sub-Eliminate grids\n', True)
    arcpy.Merge_management(inputs=outElims, output=outFC)
    WriteLog(kennyloggins, '----Outputs Merged - {0}\n'.format(str(outFC)), True)
    DeleteExists(os.path.join(os.environ['TEMP'], 'ElimTemp{0}.gdb'.format(processGroup)))
    return
def ProcessByTSA(outWrk, tsaWC=None):
    """Clip, eliminate and simplify the source VRI for each TSA into *outWrk*.

    For every TSA polygon selected by the optional where-clause *tsaWC*:
    clip ``srcVRI`` to the TSA, eliminate sliver polygons (<= 0.5 ha) --
    falling back to EliminatebyGrid on failure -- simplify the result
    (falling back to simplify with cartographic partitions on failure),
    tag it with TSA number/name, record run metrics in PROCESS_METADATA
    and save it as ``vri_tsa_<NN>``.  Per-TSA failures are logged and
    processing continues with the next TSA.
    """
    lyrTSA = 'lyrTSA'
    processMDTab = CreateProcessMetadataTable(os.path.dirname(outWrk))
    if tsaWC == None:
        arcpy.MakeFeatureLayer_management(srcTSA, lyrTSA)
    else:
        arcpy.MakeFeatureLayer_management(srcTSA, lyrTSA, tsaWC)
    with arcpy.da.SearchCursor(lyrTSA,['SHAPE@', fldTSANum, fldTSANam]) as cursor:
        for row in cursor:
            try:
                geom = row[0]
                # Zero-pad the TSA number so output names sort naturally.
                tsa_num = row[1].zfill(2)
                tsa_nam = row[2]
                # Defaults so metadata can still be written when a fallback
                # path skips some of the statistics below.
                polyCountClip = 0
                polyCountElim = 0
                iInVerts = 0
                iOutVerts = 0
                reductionRatio = 0.00
                lyrVRI = 'lyrVRI'
                lyrTSA = 'lyrTSA'
                # Constrain all geoprocessing to this TSA's extent.
                arcpy.env.extent = geom.extent
                gp.MakeFeatureLayer_management(geom, lyrTSA)
                # NOTE(review): the ``.format(tsa_num)`` on this separator
                # string is a no-op (no placeholder in the string).
                WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
                WriteLog(kennyloggins, '----Starting to Process TSA-{0}-{1}\n'.format(tsa_num, tsa_nam), True)
                WriteLog(kennyloggins, '----Creating the VRI Layer....\n', True)
                arcpy.MakeFeatureLayer_management(srcVRI, lyrVRI)
                arcpy.SelectLayerByLocation_management (lyrVRI, "INTERSECT", geom)
                clipTemp = os.path.join(outWrk, 'vri_tsa_{0}_01Clip'.format(tsa_num))
                # ``currOutput`` always points at the most recent successful
                # product; it is what gets renamed to the final output.
                currOutput = clipTemp
                lyrClipTemp = 'lyrClipTemp'
                WriteLog(kennyloggins, '----Start Clip TSA-{0}\n'.format(tsa_num), True)
                arcpy.Clip_analysis(lyrVRI, geom, clipTemp)
                polyCountClip = arcpy.GetCount_management(clipTemp)[0]
                WriteLog(kennyloggins, '----Clip product Polygon Count-{0}\n'.format(str(polyCountClip)), True)
                WriteLog(kennyloggins, '----End Clip TSA-{0}\n'.format(tsa_num), True)
                try:
                    arcpy.MakeFeatureLayer_management(clipTemp, lyrClipTemp)
                    # Select slivers: area/10000 (hectares, assuming metre
                    # units) <= 0.5 -- these get merged into neighbours.
                    arcpy.SelectLayerByAttribute_management(lyrClipTemp, "NEW_SELECTION", "({0}/10000) <= 0.5".format(GetAreaField(clipTemp)))
                    elimTemp = os.path.join(outWrk, 'vri_tsa_{0}_02Elim'.format(tsa_num))
                    WriteLog(kennyloggins, '----Start Eliminate TSA-{0}\n'.format(tsa_num), True)
                    try:
                        arcpy.Eliminate_management(lyrClipTemp, elimTemp, "LENGTH")
                        currOutput = elimTemp
                        WriteLog(kennyloggins, '----End Eliminate TSA-{0}\n'.format(tsa_num), True)
                    except Exception, e:
                        # Fallback 1: whole-TSA Eliminate failed; retry per
                        # grid tile via EliminatebyGrid.
                        WriteLog(kennyloggins, '----Eliminate Failed, will try to process by Grid TSA-{0}\n'.format(tsa_num), True)
                        WriteLog(kennyloggins, '----Error Message:\n {0}\n'.format(str(e)), True)
                        try:
                            EliminatebyGrid(geom, clipTemp, elimTemp, fraction=8)
                            currOutput = elimTemp
                            WriteLog(kennyloggins, '----End Eliminate TSA-{0}\n'.format(tsa_num), True)
                        except Exception, e:
                            WriteLog(kennyloggins, '----Eliminate by Grid Fraction Failed! TSA-{0}\n'.format(tsa_num), True)
                            WriteLog(kennyloggins, '----Error Message:\n {0}\n'.format(str(e)), True)
                            raise Exception('Eliminating using Grid Fractions Failed. I Give Up.')
                    polyCountElim = arcpy.GetCount_management(elimTemp)[0]
                    WriteLog(kennyloggins, '----Elim product Polygon Count-{0}\n'.format(str(polyCountElim)), True)
                    simpTemp = os.path.join(outWrk, 'vri_tsa_{0}_03Simp'.format(tsa_num))
                    simpTempPnt = os.path.join(outWrk, 'vri_tsa_{0}_03Simp_Pnt'.format(tsa_num))
                    WriteLog(kennyloggins, '----Start Simplify TSA-{0}\n'.format(tsa_num), True)
                    try:
                        arcpy.env.cartographicPartitions = None
                        # NOTE(review): uses the literal tolerance ``3`` here
                        # rather than the module-level ``simplifyTol`` used by
                        # the fallback path below.
                        arcpy.cartography.SimplifyPolygon(elimTemp, simpTemp, "POINT_REMOVE", 3, 5000, "RESOLVE_ERRORS", "NO_KEEP")
                        WriteLog(kennyloggins, '----End Simplify TSA-{0}\n'.format(tsa_num), True)
                        currOutput = simpTemp
                    except Exception, e:
                        # Fallback 2: simplify again using cartographic
                        # partitions built from a 10,000 m index grid.
                        WriteLog(kennyloggins, '----Straight up Simplify Failed TSA-{0}\n'.format(tsa_num), True)
                        WriteLog(kennyloggins, '----Error Message: {0}\n'.format(str(e)), True)
                        try:
                            WriteLog(kennyloggins, '----Going to try to Simplify with Grid Features 20,000m TSA-{0}\n'.format(tsa_num), True)
                            lyrElimTemp = 'lyrElim'
                            partTemp = os.path.join(outWrk, 'vri_tsa_{0}_02Part'.format(tsa_num))
                            arcpy.MakeFeatureLayer_management(elimTemp, lyrElimTemp)
                            arcpy.GridIndexFeatures_cartography(partTemp, lyrElimTemp, "INTERSECTFEATURE", "NO_USEPAGEUNIT", polygon_width=10000, polygon_height= 10000)
                            arcpy.env.cartographicPartitions = partTemp
                            res = arcpy.cartography.SimplifyPolygon(elimTemp, simpTemp, "POINT_REMOVE", simplifyTol, 5000, "RESOLVE_ERRORS", "NO_KEEP")
                            DeleteExists(partTemp)
                            arcpy.env.cartographicPartitions = None
                            iInVerts = 0
                            iOutVerts = 0
                            # Scrape per-partition vertex counts from the
                            # tool's message stream and total them.
                            for i in range(0, res.messageCount):
                                msg = res.getMessage(i).upper()
                                if msg.find('INPUT VERTEX COUNT', 0) >= 0:
                                    iInVerts = iInVerts + int(msg[19:len(msg)])
                                if msg.find('OUTPUT VERTEX COUNT', 0 ) >= 0:
                                    iOutVerts = iOutVerts + int(msg[20:len(msg)])
                            WriteLog(kennyloggins, '----Total Polys before Eliminate-{0}\n'.format(str(polyCountClip)), True)
                            WriteLog(kennyloggins, '----Total Polys after Eliminate-{0}\n'.format(str(polyCountElim)), True)
                            WriteLog(kennyloggins, '----Total In Vertices-{0}\n'.format(str(iInVerts)), True)
                            WriteLog(kennyloggins, '----Total Out Vertices-{0}\n'.format(str(iOutVerts)), True)
                            # NOTE(review): ``keepRatio`` is computed but
                            # never used.
                            keepRatio = float(float(iOutVerts)/float(iInVerts))
                            reductionRatio = float(float(iInVerts - iOutVerts)/iInVerts)
                            WriteLog(kennyloggins, '----Reduction %-{0}\n'.format(str(reductionRatio)), True)
                            currOutput = simpTemp
                            WriteLog(kennyloggins, '----End Simplify TSA-{0}\n'.format(tsa_num), True)
                        except Exception, e:
                            WriteLog(kennyloggins, '----Simplify with Grid Features 10,000m Failed! TSA-{0}\n'.format(tsa_num), True)
                            WriteLog(kennyloggins, '----Error Message:\n {0}\n'.format(str(e)), True)
                            raise Exception('Cartographic Partitions and Grid Index Attempts on Simplify Failed. I Give Up.')
                    # Attribute the product with its TSA, record run metrics,
                    # rename to the final name and clean up intermediates.
                    arcpy.AddField_management(currOutput, "TSA_NUMBER", "TEXT", 3)
                    arcpy.AddField_management(currOutput, "TSA_NAME", "TEXT", 50)
                    with arcpy.da.UpdateCursor(currOutput, ["TSA_NUMBER", "TSA_NAME"]) as cursor:
                        for row in cursor:
                            row[0] = tsa_num
                            row[1] = tsa_nam
                            cursor.updateRow(row)
                    cursor = arcpy.da.InsertCursor(processMDTab,["TSA_NUMBER", "TSA_NAME", "POLY_COUNT_ORIG", "POLY_COUNT_ELIM", "VERTICES_PRE_SIMPLIFY", "VERTICES_POST_SIMPLIFY", "VERTICES_REDUCE_PCT", "SIMPLIFY_TOLERANCE"])
                    cursor.insertRow((tsa_num, tsa_nam, polyCountClip, polyCountElim, iInVerts, iOutVerts, reductionRatio, simplifyTol))
                    del cursor
                    finData = os.path.join(outWrk, 'vri_tsa_{0}'.format(tsa_num))
                    DeleteExists(finData)
                    arcpy.Rename_management(currOutput, finData)
                    DeleteExists(simpTemp)
                    DeleteExists(simpTempPnt)
                    DeleteExists(clipTemp)
                    DeleteExists(elimTemp)
                    lyrVRI = ''
                    lyrClipTemp = ''
                    lyrElimTemp = ''
                    lyrTSA = ''
                    DeleteExists(lyrVRI)
                    DeleteExists(lyrClipTemp)
                    DeleteExists(lyrTSA)
                    DeleteExists(lyrElimTemp)
                    del lyrVRI, lyrClipTemp, lyrElimTemp
                    gc.collect()
                    WriteLog(kennyloggins, '----Done Processing TSA-{0}\n'.format(tsa_num), True)
                    WriteLog(kennyloggins, '----Output Data is {1} TSA-{0}\n'.format(tsa_num, finData), True)
                    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
                except Exception, e:
                    # NOTE(review): this handler duplicates the finalize
                    # sequence above so a partially processed TSA still gets
                    # attributed, recorded and renamed from whatever stage
                    # ``currOutput`` reached (plus partTemp cleanup).
                    WriteLog(kennyloggins, 'Error Message:\n {0}\n'.format(str(e)), True)
                    arcpy.AddField_management(currOutput, "TSA_NUMBER", "TEXT", 3)
                    arcpy.AddField_management(currOutput, "TSA_NAME", "TEXT", 50)
                    with arcpy.da.UpdateCursor(currOutput, ["TSA_NUMBER", "TSA_NAME"]) as cursor:
                        for row in cursor:
                            row[0] = tsa_num
                            row[1] = tsa_nam
                            cursor.updateRow(row)
                    cursor = arcpy.da.InsertCursor(processMDTab,["TSA_NUMBER", "TSA_NAME", "POLY_COUNT_ORIG", "POLY_COUNT_ELIM", "VERTICES_PRE_SIMPLIFY", "VERTICES_POST_SIMPLIFY", "VERTICES_REDUCE_PCT", "SIMPLIFY_TOLERANCE"])
                    cursor.insertRow((tsa_num, tsa_nam, polyCountClip, polyCountElim, iInVerts, iOutVerts, reductionRatio, simplifyTol))
                    del cursor
                    finData = os.path.join(outWrk, 'vri_tsa_{0}'.format(tsa_num))
                    DeleteExists(finData)
                    arcpy.Rename_management(currOutput, finData)
                    DeleteExists(simpTemp)
                    DeleteExists(simpTempPnt)
                    DeleteExists(clipTemp)
                    DeleteExists(elimTemp)
                    DeleteExists(partTemp)
                    lyrVRI = ''
                    lyrClipTemp = ''
                    lyrElimTemp = ''
                    lyrTSA = ''
                    DeleteExists(lyrVRI)
                    DeleteExists(lyrClipTemp)
                    DeleteExists(lyrTSA)
                    DeleteExists(lyrElimTemp)
                    del lyrVRI, lyrClipTemp, lyrElimTemp
                    gc.collect()
                    WriteLog(kennyloggins, '----Done Processing TSA-{0}\n'.format(tsa_num), True)
                    WriteLog(kennyloggins, '----Output Data is {1} TSA-{0}\n'.format(tsa_num, finData), True)
                    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
            except Exception, e:
                # Outer guard: log the failure and move on to the next TSA.
                WriteLog(kennyloggins, '*****Error Processing TSA-{0}\n'.format(tsa_num), True)
                WriteLog(kennyloggins, '*****Error Message:\n {0}\n'.format(str(e)), True)
                WriteLog(kennyloggins, '---------------------------------------------------------------------------\n'.format(tsa_num), True)
    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n', True)
    WriteLog(kennyloggins, '----Script Complete\n', True)
    WriteLog(kennyloggins, '---------------------------------------------------------------------------\n', True)
if __name__ == '__main__':
    # Let geoprocessing tools use every available core.
    arcpy.env.parallelProcessingFactor = "100%"
    arcpy.env.overwriteOutput = True
    # Drop measure (M) values from output geometries.
    arcpy.env.outputMFlag = "Disabled"
    # Open the run log; the handle is used module-wide via WriteLog.
    kennyloggins = CreateLogFile(True)
    outWrk = CreateTempDB(wrk, name='VRI_By_TSA_Group{0}'.format(processGroup))
    # Process this group's TSAs, skipping the placeholder areas 98/99.
    ProcessByTSA(outWrk, tsaWC="TSA_NUMBER NOT IN ('99', '98') AND PROCESS_GROUP = '{0}'".format(processGroup))
    kennyloggins.close()
| false | true |
f72fe9a85ffacd84ac9c14f50a694fd687e510c2 | 3,681 | py | Python | third_party/lxml_xpath_qs.py | bpuderer/python-snippets27 | 8d51ff34c48bee1247575536d8ed506eafde8631 | [
"MIT"
] | 3 | 2015-11-20T14:30:53.000Z | 2015-12-19T05:55:19.000Z | third_party/lxml_xpath_qs.py | bpuderer/python-snippets27 | 8d51ff34c48bee1247575536d8ed506eafde8631 | [
"MIT"
] | null | null | null | third_party/lxml_xpath_qs.py | bpuderer/python-snippets27 | 8d51ff34c48bee1247575536d8ed506eafde8631 | [
"MIT"
from lxml import etree
# Demo script (Python 2): querying XML with lxml's full XPath support.
# Requires ``from lxml import etree`` (see the import line above).
from StringIO import StringIO
# lxml provides full XPath syntax unlike ElementTree's ElementPath
# https://www.w3.org/TR/xpath/
# http://lxml.de/xpathxslt.html
# http://lxml.de/api/lxml.etree._ElementTree-class.html#xpath
# http://www.ibm.com/developerworks/library/x-hiperfparse/
# http://infohost.nmt.edu/tcc/help/pubs/pylxml/web/xpath.html
# https://docs.python.org/2/library/xml.etree.elementtree.html#supported-xpath-syntax
simple = '<foo><bar attr1="attrval1_1" attr2="attrval1_2">barval1</bar>first bar tail<bar attr2="attrval2_2">barval2</bar></foo>'
#tree = etree.fromstring(simple)
tree = etree.parse(StringIO(simple))
# Each xpath() hit is an _Element: show its tag, attributes, text and tail.
for r in tree.xpath('/foo/bar'):
    print "tag:", r.tag
    print "attrib:", r.attrib
    print "text:", r.text
    print "tail:", r.tail
    print "-"
print "----"
xml_text = """<?xml version="1.0"?>
<actors xmlns:fictional="http://characters.example.com"
        xmlns="http://people.example.com">
    <actor name="John Cleese">
        <birthplace>Weston-super-Mare, Somerset, England</birthplace>
        <fictional:character>Black Knight</fictional:character>
        <fictional:character>First Centurion</fictional:character>
        <fictional:character>Robin Hood</fictional:character>
        <fictional:character>Archie Leach</fictional:character>
    </actor>
    <actor name="Graham Chapman">
        <birthplace>Leicester, England</birthplace>
        <fictional:character>King Arthur</fictional:character>
        <fictional:character>Brian</fictional:character>
    </actor>
    <actor name="Eric Idle">
        <birthplace>South Shields, County Durham, England</birthplace>
        <fictional:character>The Dead Collector</fictional:character>
        <fictional:character>Harry the Haggler</fictional:character>
        <fictional:character>Gunther</fictional:character>
        <fictional:character>Berthold</fictional:character>
    </actor>
    <actor name="Nigel Terry">
        <birthplace>Bristol, Gloucestershire, England</birthplace>
        <fictional:character>King Arthur</fictional:character>
        <fictional:character>General Cobb</fictional:character>
    </actor>
    <actor name="Michael Palin">
        <birthplace>Broomhill, Sheffield, West Riding of Yorkshire, England</birthplace>
        <fictional:character>Sir Galahad</fictional:character>
        <fictional:character>Mr. Big Nose</fictional:character>
        <fictional:character>Jack Lint</fictional:character>
        <fictional:character>Ken Pile</fictional:character>
    </actor>
    <extras>
        <artist name="Mel Ferrer">
            <birthplace>Elberon, New Jersey, U.S.</birthplace>
            <fictional:character>King Arthur</fictional:character>
        </artist>
    </extras>
</actors>
"""
tree = etree.parse(StringIO(xml_text))
# Prefixes here are local to the query; they map to the document's namespaces.
ns = {'real_person': 'http://people.example.com',
      'role': 'http://characters.example.com'}
print "Birthplaces:", tree.xpath('//real_person:birthplace/text()', namespaces=ns)
print "Actor names:", tree.xpath('//real_person:actor/@name', namespaces=ns)
print "Characters:", tree.xpath('//role:character/text()', namespaces=ns)
# float is always returned if XPath result is numeric
print "Actor count:", int(tree.xpath('count(//real_person:actor)', namespaces=ns))
print "Character Jack Lint found:", tree.xpath("boolean(//role:character[text()='Jack Lint'])", namespaces=ns)
# can make a callable function from an XPath expression
# better performance when evaluating the same XPath over and over
michael_palin_found = etree.XPath("boolean(//real_person:actor[@name='Michael Palin'])", namespaces=ns)
print "Actor Michael Palin found:", michael_palin_found(tree)
from lxml import etree
# NOTE(review): this is a near-duplicate of the lxml XPath demo above (same
# code with some comments stripped -- dataset extraction residue).  It also
# requires ``from lxml import etree``.
from StringIO import StringIO
# https://www.w3.org/TR/xpath/
# http://lxml.de/xpathxslt.html
# http://lxml.de/api/lxml.etree._ElementTree-class.html#xpath
# http://www.ibm.com/developerworks/library/x-hiperfparse/
# http://infohost.nmt.edu/tcc/help/pubs/pylxml/web/xpath.html
# https://docs.python.org/2/library/xml.etree.elementtree.html#supported-xpath-syntax
simple = '<foo><bar attr1="attrval1_1" attr2="attrval1_2">barval1</bar>first bar tail<bar attr2="attrval2_2">barval2</bar></foo>'
#tree = etree.fromstring(simple)
tree = etree.parse(StringIO(simple))
# Dump tag/attributes/text/tail for every matched element.
for r in tree.xpath('/foo/bar'):
    print "tag:", r.tag
    print "attrib:", r.attrib
    print "text:", r.text
    print "tail:", r.tail
    print "-"
print "----"
xml_text = """<?xml version="1.0"?>
<actors xmlns:fictional="http://characters.example.com"
        xmlns="http://people.example.com">
    <actor name="John Cleese">
        <birthplace>Weston-super-Mare, Somerset, England</birthplace>
        <fictional:character>Black Knight</fictional:character>
        <fictional:character>First Centurion</fictional:character>
        <fictional:character>Robin Hood</fictional:character>
        <fictional:character>Archie Leach</fictional:character>
    </actor>
    <actor name="Graham Chapman">
        <birthplace>Leicester, England</birthplace>
        <fictional:character>King Arthur</fictional:character>
        <fictional:character>Brian</fictional:character>
    </actor>
    <actor name="Eric Idle">
        <birthplace>South Shields, County Durham, England</birthplace>
        <fictional:character>The Dead Collector</fictional:character>
        <fictional:character>Harry the Haggler</fictional:character>
        <fictional:character>Gunther</fictional:character>
        <fictional:character>Berthold</fictional:character>
    </actor>
    <actor name="Nigel Terry">
        <birthplace>Bristol, Gloucestershire, England</birthplace>
        <fictional:character>King Arthur</fictional:character>
        <fictional:character>General Cobb</fictional:character>
    </actor>
    <actor name="Michael Palin">
        <birthplace>Broomhill, Sheffield, West Riding of Yorkshire, England</birthplace>
        <fictional:character>Sir Galahad</fictional:character>
        <fictional:character>Mr. Big Nose</fictional:character>
        <fictional:character>Jack Lint</fictional:character>
        <fictional:character>Ken Pile</fictional:character>
    </actor>
    <extras>
        <artist name="Mel Ferrer">
            <birthplace>Elberon, New Jersey, U.S.</birthplace>
            <fictional:character>King Arthur</fictional:character>
        </artist>
    </extras>
</actors>
"""
tree = etree.parse(StringIO(xml_text))
# Query-local prefixes mapped to the document's namespace URIs.
ns = {'real_person': 'http://people.example.com',
      'role': 'http://characters.example.com'}
print "Birthplaces:", tree.xpath('//real_person:birthplace/text()', namespaces=ns)
print "Actor names:", tree.xpath('//real_person:actor/@name', namespaces=ns)
print "Characters:", tree.xpath('//role:character/text()', namespaces=ns)
# float is always returned if XPath result is numeric
print "Actor count:", int(tree.xpath('count(//real_person:actor)', namespaces=ns))
print "Character Jack Lint found:", tree.xpath("boolean(//role:character[text()='Jack Lint'])", namespaces=ns)
# can make a callable function from an XPath expression
# better performance when evaluating the same XPath over and over
michael_palin_found = etree.XPath("boolean(//real_person:actor[@name='Michael Palin'])", namespaces=ns)
print "Actor Michael Palin found:", michael_palin_found(tree)
| false | true |
f72fea862619713252e7fba20316ffcd135413b8 | 21,695 | py | Python | external/mmdetection/tests/ote_params_validation/test_ote_data_utils_params_validation.py | opencv/openvino_training_extensions | f5d809741e192a2345558efc75899a475019cf98 | [
"Apache-2.0"
] | 775 | 2019-03-01T02:13:33.000Z | 2020-09-07T22:49:15.000Z | external/mmdetection/tests/ote_params_validation/test_ote_data_utils_params_validation.py | opencv/openvino_training_extensions | f5d809741e192a2345558efc75899a475019cf98 | [
"Apache-2.0"
] | 229 | 2019-02-28T21:37:08.000Z | 2020-09-07T15:11:49.000Z | external/mmdetection/tests/ote_params_validation/test_ote_data_utils_params_validation.py | opencv/openvino_training_extensions | f5d809741e192a2345558efc75899a475019cf98 | [
"Apache-2.0"
# Copyright (C) 2021-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
import os.path as osp
import tempfile
import mmcv
import pytest
from detection_tasks.extension.datasets.data_utils import (
CocoDataset,
LoadAnnotations,
find_label_by_name,
format_list_to_str,
get_anchor_boxes,
get_classes_from_annotation,
get_sizes_from_dataset_entity,
load_dataset_items_coco_format,
)
from ote_sdk.entities.datasets import DatasetEntity
from ote_sdk.entities.label import Domain, LabelEntity
from ote_sdk.test_suite.e2e_test_system import e2e_pytest_unit
from ote_sdk.tests.parameters_validation.validation_helper import (
check_value_error_exception_raised,
)
def _create_dummy_coco_json(json_name):
    """Dump a minimal COCO-format annotation file to *json_name*.

    The fixture contains a single 640x640 image, two bounding-box
    annotations and one category ("car") -- just enough structure for the
    COCO loaders under test.
    """
    fake_json = {
        "images": [
            {
                "id": 0,
                "width": 640,
                "height": 640,
                "file_name": "fake_name.jpg",
            }
        ],
        "annotations": [
            {
                "id": 1,
                "image_id": 0,
                "category_id": 0,
                "area": 400,
                "bbox": [50, 60, 20, 20],
                "iscrowd": 0,
            },
            {
                "id": 2,
                "image_id": 0,
                "category_id": 0,
                "area": 900,
                "bbox": [100, 120, 30, 30],
                "iscrowd": 0,
            },
        ],
        "categories": [
            {
                "id": 0,
                "name": "car",
                "supercategory": "car",
            }
        ],
    }
    mmcv.dump(fake_json, json_name)
class TestDataUtilsFunctionsInputParamsValidation:
    """Input-parameter validation tests for the free functions in
    ``detection_tasks.extension.datasets.data_utils``.

    Each test feeds deliberately wrong-typed or malformed arguments and
    expects a ValueError, via ``check_value_error_exception_raised``.
    """
    @e2e_pytest_unit
    def test_get_classes_from_annotation_input_params_validation(self):
        """
        <b>Description:</b>
        Check "get_classes_from_annotation" function input parameters validation
        <b>Input data:</b>
        "path" unexpected object
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as input parameter for
        "get_classes_from_annotation" function
        """
        for unexpected_value in [
            # non string object is specified as "path" parameter
            1,
            # Empty string is specified as "path" parameter
            "",
            # Path to file with unexpected extension is specified as "path" parameter
            "./unexpected_extension.yaml",
            # Path to non-existing file is specified as "path" parameter
            "./non_existing.json",
            # Path with null character is specified as "path" parameter
            "./null\0char.json",
            # Path with non-printable character is specified as "path" parameter
            "./\non_printable_char.json",
        ]:
            with pytest.raises(ValueError):
                get_classes_from_annotation(path=unexpected_value)
    @e2e_pytest_unit
    def test_find_label_by_name_params_validation(self):
        """
        <b>Description:</b>
        Check "find_label_by_name" function input parameters validation
        <b>Input data:</b>
        "find_label_by_name" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "find_label_by_name" function
        """
        label = LabelEntity(name="test label", domain=Domain.DETECTION)
        correct_values_dict = {
            "labels": [label],
            "name": "test label",
            "domain": Domain.DETECTION,
        }
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "labels" parameter
            ("labels", unexpected_int),
            # Unexpected integer is specified as nested label
            ("labels", [label, unexpected_int]),
            # Unexpected integer is specified as "name" parameter
            ("name", unexpected_int),
            # Unexpected integer is specified as "domain" parameter
            ("domain", unexpected_int),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=find_label_by_name,
        )
    @e2e_pytest_unit
    def test_load_dataset_items_coco_format_params_validation(self):
        """
        <b>Description:</b>
        Check "load_dataset_items_coco_format" function input parameters validation
        <b>Input data:</b>
        "load_dataset_items_coco_format" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "load_dataset_items_coco_format" function
        """
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        label = LabelEntity(name="test label", domain=Domain.DETECTION)
        correct_values_dict = {
            "ann_file_path": fake_json_file,
            "data_root_dir": tmp_dir.name,
            "domain": Domain.DETECTION,
        }
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "ann_file_path" parameter
            ("ann_file_path", unexpected_int),
            # Empty string is specified as "ann_file_path" parameter
            ("ann_file_path", ""),
            # Path to non-json file is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "non_json.jpg")),
            # Path with null character is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "\0fake_data.json")),
            # Path with non-printable character is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "\nfake_data.json")),
            # Path to non-existing file is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "non_existing.json")),
            # Unexpected integer is specified as "data_root_dir" parameter
            ("data_root_dir", unexpected_int),
            # Empty string is specified as "data_root_dir" parameter
            ("data_root_dir", ""),
            # Path with null character is specified as "data_root_dir" parameter
            ("data_root_dir", "./\0null_char"),
            # Path with non-printable character is specified as "data_root_dir" parameter
            ("data_root_dir", "./\non_printable_char"),
            # Unexpected integer is specified as "domain" parameter
            ("domain", unexpected_int),
            # Unexpected integer is specified as "subset" parameter
            ("subset", unexpected_int),
            # Unexpected integer is specified as "labels_list" parameter
            ("labels_list", unexpected_int),
            # Unexpected integer is specified as nested label
            ("labels_list", [label, unexpected_int]),
            # Unexpected string is specified as "with_mask" parameter
            ("with_mask", "unexpected string"),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=load_dataset_items_coco_format,
        )
    @e2e_pytest_unit
    def test_get_sizes_from_dataset_entity_params_validation(self):
        """
        <b>Description:</b>
        Check "get_sizes_from_dataset_entity" function input parameters validation
        <b>Input data:</b>
        "get_sizes_from_dataset_entity" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_sizes_from_dataset_entity" function
        """
        correct_values_dict = {
            "dataset": DatasetEntity(),
            "target_wh": [(0.1, 0.1)],
        }
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "dataset" parameter
            ("dataset", unexpected_int),
            # Unexpected integer is specified as "target_wh" parameter
            ("target_wh", unexpected_int),
            # Unexpected integer is specified as nested target_wh
            ("target_wh", [(0.1, 0.1), unexpected_int]),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=get_sizes_from_dataset_entity,
        )
    @e2e_pytest_unit
    def test_format_list_to_str_params_validation(self):
        """
        <b>Description:</b>
        Check "format_list_to_str" function input parameters validation
        <b>Input data:</b>
        "value_lists" unexpected type object
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "format_list_to_str" function
        """
        with pytest.raises(ValueError):
            format_list_to_str(value_lists="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_get_anchor_boxes_params_validation(self):
        """
        <b>Description:</b>
        Check "get_anchor_boxes" function input parameters validation
        <b>Input data:</b>
        "get_anchor_boxes" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_anchor_boxes" function
        """
        correct_values_dict = {
            "wh_stats": [("wh_stat_1", 1), ("wh_stat_2", 2)],
            "group_as": [0, 1, 2],
        }
        unexpected_str = "unexpected string"
        unexpected_values = [
            # Unexpected string is specified as "wh_stats" parameter
            ("wh_stats", unexpected_str),
            # Unexpected string is specified as nested "wh_stat"
            ("wh_stats", [("wh_stat_1", 1), unexpected_str]),
            # Unexpected string is specified as "group_as" parameter
            ("group_as", unexpected_str),
            # Unexpected string is specified as nested "group_as"
            ("group_as", [0, 1, 2, unexpected_str]),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=get_anchor_boxes,
        )
class TestLoadAnnotationsInputParamsValidation:
    """Input-parameter validation tests for the LoadAnnotations transform."""
    @e2e_pytest_unit
    def test_load_annotations_init_params_validation(self):
        """
        <b>Description:</b>
        Check LoadAnnotations object initialization parameters validation
        <b>Input data:</b>
        LoadAnnotations object initialization parameters with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        LoadAnnotations initialization parameter
        """
        # Every boolean flag must reject a non-boolean value.
        bool_flags = ("with_bbox", "with_label", "with_mask")
        for flag_name in bool_flags:
            with pytest.raises(ValueError):
                LoadAnnotations(**{flag_name: "unexpected string"})
    @e2e_pytest_unit
    def test_load_annotations_call_params_validation(self):
        """
        <b>Description:</b>
        Check LoadAnnotations object "__call__" method input parameters validation
        <b>Input data:</b>
        "results" parameter with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "__call__" method
        """
        loader = LoadAnnotations()
        bad_int = 1
        # A plain integer, then a dict containing an integer key.
        bad_results = (
            bad_int,
            {"result_1": "some results", bad_int: "unexpected results"},
        )
        for bad_value in bad_results:
            with pytest.raises(ValueError):
                loader(results=bad_value)
class TestCocoDatasetInputParamsValidation:
    """Input parameters validation tests for CocoDataset and its methods."""

    @staticmethod
    def create_fake_json_file():
        # Generates a dummy COCO annotation file inside a temporary directory.
        # NOTE(review): `tmp_dir` goes out of scope when this function returns,
        # which triggers TemporaryDirectory cleanup, so the returned path may
        # already be deleted by the time a caller uses it — confirm.
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        return fake_json_file
    @staticmethod
    def dataset():
        # Builds a CocoDataset over a freshly generated dummy annotation file.
        # NOTE(review): relies on CocoDataset reading the annotation file in
        # its constructor, before the temporary directory is finalized.
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        return CocoDataset(fake_json_file)
    @e2e_pytest_unit
    def test_coco_dataset_init_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object initialization parameters validation
        <b>Input data:</b>
        CocoDataset object initialization parameters with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        CocoDataset object initialization parameter
        """
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        correct_values_dict = {
            "ann_file": fake_json_file,
        }
        unexpected_str = "unexpected string"
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "ann_file" parameter
            ("ann_file", unexpected_int),
            # Empty string is specified as "ann_file" parameter
            ("ann_file", ""),
            # Path to non-json file is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "non_json.jpg")),
            # Path with null character is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "\0fake_data.json")),
            # Path with non-printable character is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "\nfake_data.json")),
            # Path to non-existing file is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "non_existing.json")),
            # Unexpected integer is specified as "classes" parameter
            ("classes", unexpected_int),
            # Unexpected integer is specified nested class
            ("classes", ["class_1", unexpected_int]),
            # Unexpected integer is specified as "data_root" parameter
            ("data_root", unexpected_int),
            # Empty string is specified as "data_root" parameter
            ("data_root", ""),
            # Path with null character is specified as "data_root" parameter
            ("data_root", "./\0null_char"),
            # Path with non-printable character is specified as "data_root" parameter
            ("data_root", "./\non_printable_char"),
            # Unexpected integer is specified as "img_prefix" parameter
            ("img_prefix", unexpected_int),
            # Unexpected string is specified as "test_mode" parameter
            ("test_mode", unexpected_str),
            # Unexpected string is specified as "filter_empty_gt" parameter
            ("filter_empty_gt", unexpected_str),
            # Unexpected string is specified as "min_size" parameter
            ("min_size", unexpected_str),
            # Unexpected string is specified as "with_mask" parameter
            ("with_mask", unexpected_str),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=CocoDataset,
        )
    @e2e_pytest_unit
    def test_coco_dataset_pre_pipeline_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "pre_pipeline" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "results" parameter with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "pre_pipeline" method
        """
        dataset = self.dataset()
        unexpected_int = 1
        for unexpected_value in [
            # Unexpected integer is specified as "results" parameter
            unexpected_int,
            # Unexpected integer is specified as "results" dictionary key
            {"result_1": "some results", unexpected_int: "unexpected results"},
        ]:
            with pytest.raises(ValueError):
                dataset.pre_pipeline(results=unexpected_value)
    @e2e_pytest_unit
    def test_coco_dataset_get_item_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "__getitem__" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "__getitem__" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.__getitem__(idx="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_prepare_img_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "prepare_img" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "prepare_img" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.prepare_img(idx="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_get_classes_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "get_classes" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "classes" parameter with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_classes" method
        """
        dataset = self.dataset()
        unexpected_int = 1
        for unexpected_value in [
            # Unexpected integer is specified as "classes" parameter
            unexpected_int,
            # Unexpected integer is specified as nested "classes" element
            ["class_1", unexpected_int],
        ]:
            with pytest.raises(ValueError):
                dataset.get_classes(classes=unexpected_value)  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_load_annotations_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "load_annotations" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "ann_file" unexpected object
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "load_annotations" method
        """
        dataset = self.dataset()
        for unexpected_value in [
            # Unexpected integer is specified as "ann_file" parameter
            1,
            # Empty string is specified as "ann_file" parameter
            "",
            # Path to non-existing file is specified as "ann_file" parameter
            "./non_existing.json",
            # Path to non-json file is specified as "ann_file" parameter
            "./unexpected_type.jpg",
            # Path with null character is specified as "ann_file" parameter
            "./null\0char.json",
            # Path with non-printable character is specified as "ann_file" parameter
            "./null\nchar.json",
        ]:
            with pytest.raises(ValueError):
                dataset.load_annotations(ann_file=unexpected_value)
    @e2e_pytest_unit
    def test_coco_dataset_get_ann_info_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "get_ann_info" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_ann_info" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.get_ann_info(idx="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_get_cat_ids_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "get_cat_ids" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_cat_ids" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.get_cat_ids(idx="unexpected string")  # type: ignore
| 39.445455 | 117 | 0.638442 |
import os.path as osp
import tempfile
import mmcv
import pytest
from detection_tasks.extension.datasets.data_utils import (
CocoDataset,
LoadAnnotations,
find_label_by_name,
format_list_to_str,
get_anchor_boxes,
get_classes_from_annotation,
get_sizes_from_dataset_entity,
load_dataset_items_coco_format,
)
from ote_sdk.entities.datasets import DatasetEntity
from ote_sdk.entities.label import Domain, LabelEntity
from ote_sdk.test_suite.e2e_test_system import e2e_pytest_unit
from ote_sdk.tests.parameters_validation.validation_helper import (
check_value_error_exception_raised,
)
def _create_dummy_coco_json(json_name):
image = {
"id": 0,
"width": 640,
"height": 640,
"file_name": "fake_name.jpg",
}
annotation_1 = {
"id": 1,
"image_id": 0,
"category_id": 0,
"area": 400,
"bbox": [50, 60, 20, 20],
"iscrowd": 0,
}
annotation_2 = {
"id": 2,
"image_id": 0,
"category_id": 0,
"area": 900,
"bbox": [100, 120, 30, 30],
"iscrowd": 0,
}
categories = [
{
"id": 0,
"name": "car",
"supercategory": "car",
}
]
fake_json = {
"images": [image],
"annotations": [annotation_1, annotation_2],
"categories": categories,
}
mmcv.dump(fake_json, json_name)
class TestDataUtilsFunctionsInputParamsValidation:
@e2e_pytest_unit
def test_get_classes_from_annotation_input_params_validation(self):
for unexpected_value in [
1,
"",
"./unexpected_extension.yaml",
"./non_existing.json",
"./null\0char.json",
"./\non_printable_char.json",
]:
with pytest.raises(ValueError):
get_classes_from_annotation(path=unexpected_value)
@e2e_pytest_unit
def test_find_label_by_name_params_validation(self):
label = LabelEntity(name="test label", domain=Domain.DETECTION)
correct_values_dict = {
"labels": [label],
"name": "test label",
"domain": Domain.DETECTION,
}
unexpected_int = 1
unexpected_values = [
("labels", unexpected_int),
("labels", [label, unexpected_int]),
("name", unexpected_int),
("domain", unexpected_int),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=find_label_by_name,
)
@e2e_pytest_unit
def test_load_dataset_items_coco_format_params_validation(self):
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
label = LabelEntity(name="test label", domain=Domain.DETECTION)
correct_values_dict = {
"ann_file_path": fake_json_file,
"data_root_dir": tmp_dir.name,
"domain": Domain.DETECTION,
}
unexpected_int = 1
unexpected_values = [
("ann_file_path", unexpected_int),
("ann_file_path", ""),
("ann_file_path", osp.join(tmp_dir.name, "non_json.jpg")),
("ann_file_path", osp.join(tmp_dir.name, "\0fake_data.json")),
("ann_file_path", osp.join(tmp_dir.name, "\nfake_data.json")),
("ann_file_path", osp.join(tmp_dir.name, "non_existing.json")),
("data_root_dir", unexpected_int),
("data_root_dir", ""),
("data_root_dir", "./\0null_char"),
("data_root_dir", "./\non_printable_char"),
("domain", unexpected_int),
("subset", unexpected_int),
("labels_list", unexpected_int),
("labels_list", [label, unexpected_int]),
("with_mask", "unexpected string"),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=load_dataset_items_coco_format,
)
@e2e_pytest_unit
def test_get_sizes_from_dataset_entity_params_validation(self):
correct_values_dict = {
"dataset": DatasetEntity(),
"target_wh": [(0.1, 0.1)],
}
unexpected_int = 1
unexpected_values = [
("dataset", unexpected_int),
("target_wh", unexpected_int),
("target_wh", [(0.1, 0.1), unexpected_int]),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=get_sizes_from_dataset_entity,
)
@e2e_pytest_unit
def test_format_list_to_str_params_validation(self):
with pytest.raises(ValueError):
format_list_to_str(value_lists="unexpected string")
@e2e_pytest_unit
def test_get_anchor_boxes_params_validation(self):
correct_values_dict = {
"wh_stats": [("wh_stat_1", 1), ("wh_stat_2", 2)],
"group_as": [0, 1, 2],
}
unexpected_str = "unexpected string"
unexpected_values = [
("wh_stats", unexpected_str),
("wh_stats", [("wh_stat_1", 1), unexpected_str]),
("group_as", unexpected_str),
("group_as", [0, 1, 2, unexpected_str]),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=get_anchor_boxes,
)
class TestLoadAnnotationsInputParamsValidation:
@e2e_pytest_unit
def test_load_annotations_init_params_validation(self):
for parameter in ["with_bbox", "with_label", "with_mask"]:
with pytest.raises(ValueError):
LoadAnnotations(**{parameter: "unexpected string"})
@e2e_pytest_unit
def test_load_annotations_call_params_validation(self):
load_annotations = LoadAnnotations()
unexpected_int = 1
for unexpected_value in [
unexpected_int,
{"result_1": "some results", unexpected_int: "unexpected results"},
]:
with pytest.raises(ValueError):
load_annotations(results=unexpected_value)
class TestCocoDatasetInputParamsValidation:
@staticmethod
def create_fake_json_file():
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
return fake_json_file
@staticmethod
def dataset():
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
return CocoDataset(fake_json_file)
@e2e_pytest_unit
def test_coco_dataset_init_params_validation(self):
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
correct_values_dict = {
"ann_file": fake_json_file,
}
unexpected_str = "unexpected string"
unexpected_int = 1
unexpected_values = [
("ann_file", unexpected_int),
("ann_file", ""),
("ann_file", osp.join(tmp_dir.name, "non_json.jpg")),
("ann_file", osp.join(tmp_dir.name, "\0fake_data.json")),
("ann_file", osp.join(tmp_dir.name, "\nfake_data.json")),
("ann_file", osp.join(tmp_dir.name, "non_existing.json")),
("classes", unexpected_int),
("classes", ["class_1", unexpected_int]),
("data_root", unexpected_int),
("data_root", ""),
("data_root", "./\0null_char"),
("data_root", "./\non_printable_char"),
("img_prefix", unexpected_int),
("test_mode", unexpected_str),
("filter_empty_gt", unexpected_str),
("min_size", unexpected_str),
("with_mask", unexpected_str),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=CocoDataset,
)
@e2e_pytest_unit
def test_coco_dataset_pre_pipeline_params_validation(self):
dataset = self.dataset()
unexpected_int = 1
for unexpected_value in [
unexpected_int,
{"result_1": "some results", unexpected_int: "unexpected results"},
]:
with pytest.raises(ValueError):
dataset.pre_pipeline(results=unexpected_value)
@e2e_pytest_unit
def test_coco_dataset_get_item_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.__getitem__(idx="unexpected string")
@e2e_pytest_unit
def test_coco_dataset_prepare_img_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.prepare_img(idx="unexpected string")
@e2e_pytest_unit
def test_coco_dataset_get_classes_params_validation(self):
dataset = self.dataset()
unexpected_int = 1
for unexpected_value in [
unexpected_int,
["class_1", unexpected_int],
]:
with pytest.raises(ValueError):
dataset.get_classes(classes=unexpected_value)
@e2e_pytest_unit
def test_coco_dataset_load_annotations_params_validation(self):
dataset = self.dataset()
for unexpected_value in [
1,
"",
"./non_existing.json",
"./unexpected_type.jpg",
"./null\0char.json",
"./null\nchar.json",
]:
with pytest.raises(ValueError):
dataset.load_annotations(ann_file=unexpected_value)
@e2e_pytest_unit
def test_coco_dataset_get_ann_info_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.get_ann_info(idx="unexpected string")
@e2e_pytest_unit
def test_coco_dataset_get_cat_ids_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.get_cat_ids(idx="unexpected string")
| true | true |
f72fea9931e22e9f239b53d7134f8989231f7dc2 | 2,129 | py | Python | aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py | azadoks/aiida-core | b806b7fef8fc79090deccfe2019b77cb922e0581 | [
"MIT",
"BSD-3-Clause"
] | 180 | 2019-07-12T07:45:26.000Z | 2022-03-22T13:16:57.000Z | aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py | azadoks/aiida-core | b806b7fef8fc79090deccfe2019b77cb922e0581 | [
"MIT",
"BSD-3-Clause"
] | 2,325 | 2019-07-04T13:41:44.000Z | 2022-03-31T12:17:10.000Z | aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py | azadoks/aiida-core | b806b7fef8fc79090deccfe2019b77cb922e0581 | [
"MIT",
"BSD-3-Clause"
] | 88 | 2019-07-06T01:42:39.000Z | 2022-03-18T14:20:09.000Z | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
# pylint: disable=invalid-name
"""Add a uniqueness constraint to the uuid column of DbNode table."""
from django.db import migrations, models
from aiida.backends.djsite.db.migrations import upgrade_schema_version
from aiida.common.utils import get_new_uuid
REVISION = '1.0.14'
DOWN_REVISION = '1.0.13'
def verify_node_uuid_uniqueness(_, __):
    """Fail the migration when the `db_dbnode` table holds duplicate UUIDs.

    The two ignored positional arguments exist only because this callable is
    invoked through `migrations.RunPython`, which always passes the app
    registry and the schema editor; the shared integrity helper cannot be used
    directly because of that signature mismatch.

    :raises: IntegrityError if database contains nodes with duplicate UUIDS.
    """
    # Imported lazily so the module can be loaded without a configured backend.
    from aiida.backends.general.migrations import duplicate_uuids
    duplicate_uuids.verify_uuid_uniqueness(table='db_dbnode')
def reverse_code(_, __):
    """No-op backward step for the RunPython operation: the uniqueness
    verification has nothing to undo."""
    return None
class Migration(migrations.Migration):
    """Add a uniqueness constraint to the uuid column of DbNode table."""
    # Builds directly on the Django 1.8 schema migration.
    dependencies = [
        ('db', '0013_django_1_8'),
    ]
    operations = [
        # First abort (IntegrityError) if duplicate UUIDs already exist;
        # the reverse step is a no-op.
        migrations.RunPython(verify_node_uuid_uniqueness, reverse_code=reverse_code),
        # Then let the database itself enforce uniqueness; 36 characters is
        # the length of the canonical hyphenated UUID text form.
        migrations.AlterField(
            model_name='dbnode',
            name='uuid',
            field=models.CharField(max_length=36, default=get_new_uuid, unique=True),
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
| 38.709091 | 114 | 0.627055 | true | true | |
f72feae6ab211e77121bc7730e459830daa3eb1d | 826 | py | Python | pyadlml/dataset/obj.py | tcsvn/pyadlml | 9b87d223ba0ef9814ba830263dd35fc6432fae87 | [
"MIT"
] | 4 | 2020-11-11T17:29:10.000Z | 2021-01-08T20:55:47.000Z | pyadlml/dataset/obj.py | tcsvn/pyadlml | 9b87d223ba0ef9814ba830263dd35fc6432fae87 | [
"MIT"
] | null | null | null | pyadlml/dataset/obj.py | tcsvn/pyadlml | 9b87d223ba0ef9814ba830263dd35fc6432fae87 | [
"MIT"
] | 5 | 2020-10-05T03:23:31.000Z | 2022-01-25T19:15:34.000Z | from pyadlml.dataset._representations.raw import create_raw
from pyadlml.dataset._representations.changepoint import create_changepoint
from pyadlml.dataset.activities import check_activities
class Data():
    """Bundle of one ADL dataset: device events, activity annotations and the
    corresponding label lists, plus derived representations built on demand.
    """
    def __init__(self, activities, devices, activity_list, device_list):
        #assert check_activities(activities)
        #assert check_devices(devices)
        # activity annotations (the df_* prefix and the create_raw call
        # suggest pandas DataFrames — TODO confirm against callers)
        self.df_activities = activities
        # device event records
        self.df_devices = devices
        # list of activities and devices
        self.lst_activities = activity_list
        self.lst_devices = device_list
    def create_cp(self, t_res):
        """Create the changepoint representation (not implemented yet)."""
        raise NotImplementedError
    def create_raw(self, t_res=None, idle=False):
        """Build the raw representation and store it as `self.df_raw`.

        NOTE(review): the `idle` argument is accepted but never used here.
        """
        self.df_raw = create_raw(self.df_devices, self.df_activities, t_res)
def create_lastfired(self):
raise NotImplementedError | 34.416667 | 76 | 0.737288 | from pyadlml.dataset._representations.raw import create_raw
from pyadlml.dataset._representations.changepoint import create_changepoint
from pyadlml.dataset.activities import check_activities
class Data():
def __init__(self, activities, devices, activity_list, device_list):
self.df_activities = activities
self.df_devices = devices
self.lst_activities = activity_list
self.lst_devices = device_list
def create_cp(self, t_res):
raise NotImplementedError
def create_raw(self, t_res=None, idle=False):
self.df_raw = create_raw(self.df_devices, self.df_activities, t_res)
def create_lastfired(self):
raise NotImplementedError | true | true |
f72fec11a0ec5517350c9336346de65477e1cb36 | 87,391 | py | Python | python3/pyinotify.py | koto/pyinotify | b828a124bcf2310df7e2e7683b0902fcd78a08bf | [
"MIT"
] | 1 | 2020-03-31T21:41:57.000Z | 2020-03-31T21:41:57.000Z | python3/pyinotify.py | koto/pyinotify | b828a124bcf2310df7e2e7683b0902fcd78a08bf | [
"MIT"
] | null | null | null | python3/pyinotify.py | koto/pyinotify | b828a124bcf2310df7e2e7683b0902fcd78a08bf | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# pyinotify.py - python interface to inotify
# Copyright (c) 2005-2011 Sebastien Martini <seb@dbzteam.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
pyinotify
@author: Sebastien Martini
@license: MIT License
@contact: seb@dbzteam.org
"""
class PyinotifyError(Exception):
    """Base class of every exception raised by a Pyinotify class."""
    pass
class UnsupportedPythonVersionError(PyinotifyError):
    """Raised when Pyinotify runs on a Python older than 3.0."""

    def __init__(self, version):
        """
        @param version: Current Python version
        @type version: string
        """
        msg = ('Python %s is unsupported, requires '
               'at least Python 3.0') % version
        PyinotifyError.__init__(self, msg)
# Check Python version
import sys
if sys.version_info < (3, 0):
raise UnsupportedPythonVersionError(sys.version)
# Import directives
import threading
import os
import select
import struct
import fcntl
import errno
import termios
import array
import logging
import atexit
from collections import deque
from datetime import datetime, timedelta
import time
import re
import asyncore
import glob
import locale
import subprocess
try:
from functools import reduce
except ImportError:
pass # Will fail on Python 2.4 which has reduce() builtin anyway.
try:
import ctypes
import ctypes.util
except ImportError:
ctypes = None
try:
import inotify_syscalls
except ImportError:
inotify_syscalls = None
__author__ = "seb@dbzteam.org (Sebastien Martini)"
__version__ = "0.9.4"
# Compatibity mode: set to True to improve compatibility with
# Pyinotify 0.7.1. Do not set this variable yourself, call the
# function compatibility_mode() instead.
COMPATIBILITY_MODE = False
class InotifyBindingNotFoundError(PyinotifyError):
    """Raised when no usable inotify binding could be found."""

    def __init__(self):
        PyinotifyError.__init__(self, "Couldn't find any inotify binding")
class INotifyWrapper:
    """
    Abstract class wrapping access to inotify's functions. This is an
    internal class.
    """
    @staticmethod
    def create():
        """
        Factory returning the first usable binding: the ctypes/libc
        wrapper when available, otherwise the compiled C extension.
        Returns None implicitly when neither can be initialized.
        """
        if ctypes:
            wrapper = _CtypesLibcINotifyWrapper()
            if wrapper.init():
                return wrapper
        if inotify_syscalls:
            wrapper = _INotifySyscallsWrapper()
            if wrapper.init():
                return wrapper

    def get_errno(self):
        """
        Return the last errno code, or None when no errno is available.
        """
        return self._get_errno()

    def str_errno(self):
        """Return a human-readable description of the last errno."""
        err_code = self.get_errno()
        if err_code is None:
            return 'Errno: no errno support'
        return 'Errno=%s (%s)' % (os.strerror(err_code),
                                  errno.errorcode[err_code])

    def inotify_init(self):
        return self._inotify_init()

    def inotify_add_watch(self, fd, pathname, mask):
        # Unicode strings must be encoded to string prior to calling this
        # method.
        assert isinstance(pathname, str)
        return self._inotify_add_watch(fd, pathname, mask)

    def inotify_rm_watch(self, fd, wd):
        return self._inotify_rm_watch(fd, wd)
class _INotifySyscallsWrapper(INotifyWrapper):
    """Inotify binding backed by the compiled inotify_syscalls extension."""

    def __init__(self):
        # Holds the errno of the most recent failed call, if any.
        self._last_errno = None

    def init(self):
        assert inotify_syscalls
        return True

    def _get_errno(self):
        return self._last_errno

    def _inotify_init(self):
        try:
            return inotify_syscalls.inotify_init()
        except IOError as err:
            self._last_errno = err.errno
            return -1

    def _inotify_add_watch(self, fd, pathname, mask):
        try:
            return inotify_syscalls.inotify_add_watch(fd, pathname, mask)
        except IOError as err:
            self._last_errno = err.errno
            return -1

    def _inotify_rm_watch(self, fd, wd):
        try:
            return inotify_syscalls.inotify_rm_watch(fd, wd)
        except IOError as err:
            self._last_errno = err.errno
            return -1
class _CtypesLibcINotifyWrapper(INotifyWrapper):
    """Inotify binding implemented with ctypes calls into libc."""
    def __init__(self):
        self._libc = None
        self._get_errno_func = None
    def init(self):
        """Load libc, wire up errno access and verify the inotify symbols.

        @return: True if the binding is usable, False otherwise.
        """
        assert ctypes
        libc_name = None
        try:
            libc_name = ctypes.util.find_library('c')
        except (OSError, IOError):
            pass # Will attempt to load it with None anyway.
        # use_errno=True makes ctypes.get_errno() reflect libc call failures.
        self._libc = ctypes.CDLL(libc_name, use_errno=True)
        self._get_errno_func = ctypes.get_errno
        # Eventually check that libc has needed inotify bindings.
        if (not hasattr(self._libc, 'inotify_init') or
            not hasattr(self._libc, 'inotify_add_watch') or
            not hasattr(self._libc, 'inotify_rm_watch')):
            return False
        # Declare prototypes so ctypes converts arguments/results correctly.
        self._libc.inotify_init.argtypes = []
        self._libc.inotify_init.restype = ctypes.c_int
        self._libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p,
                                                 ctypes.c_uint32]
        self._libc.inotify_add_watch.restype = ctypes.c_int
        self._libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
        self._libc.inotify_rm_watch.restype = ctypes.c_int
        return True
    def _get_errno(self):
        assert self._get_errno_func
        return self._get_errno_func()
    def _inotify_init(self):
        assert self._libc is not None
        return self._libc.inotify_init()
    def _inotify_add_watch(self, fd, pathname, mask):
        assert self._libc is not None
        # Encodes path to a bytes string. This conversion seems required because
        # ctypes.create_string_buffer seems to manipulate bytes internally.
        # Moreover it seems that inotify_add_watch does not work very well when
        # it receives an ctypes.create_unicode_buffer instance as argument.
        pathname = pathname.encode(sys.getfilesystemencoding())
        pathname = ctypes.create_string_buffer(pathname)
        return self._libc.inotify_add_watch(fd, pathname, mask)
    def _inotify_rm_watch(self, fd, wd):
        assert self._libc is not None
        return self._libc.inotify_rm_watch(fd, wd)
    def _sysctl(self, *args):
        # NOTE(review): forwards to the legacy sysctl(2) libc entry point,
        # which is absent on modern Linux systems — verify before relying
        # on SysCtlINotify.
        assert self._libc is not None
        return self._libc.sysctl(*args)
# Logging
def logger_init():
    """Initialize and return the module-level "pyinotify" logger.

    A StreamHandler (stderr by default) with a timestamped format is
    attached and the level is set to INFO.

    @return: logger dedicated to pyinotify.
    @rtype: logging.Logger
    """
    log = logging.getLogger("pyinotify")
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(
        logging.Formatter("[%(asctime)s %(name)s %(levelname)s] %(message)s"))
    log.addHandler(console_handler)
    # Use the named constant instead of the magic number 20 (same value).
    log.setLevel(logging.INFO)
    return log
log = logger_init()
# inotify's variables
class SysCtlINotify:
    """
    Access (read, write) inotify's variables through sysctl. Usually it
    requires administrator rights to update them.
    Examples:
      - Read max_queued_events attribute: myvar = max_queued_events.value
      - Update max_queued_events attribute: max_queued_events.value = 42
    """
    # Maps each tunable name to its index in the fs.inotify sysctl table.
    inotify_attrs = {'max_user_instances': 1,
                     'max_user_watches': 2,
                     'max_queued_events': 3}
    def __init__(self, attrname, inotify_wrapper):
        # FIXME: right now only supporting ctypes
        assert ctypes
        self._attrname = attrname
        self._inotify_wrapper = inotify_wrapper
        # Three-element MIB vector handed to sysctl(2); presumably
        # {CTL_FS=5, FS_INOTIFY=20, attribute index} — TODO confirm
        # against the kernel headers.
        sino = ctypes.c_int * 3
        self._attr = sino(5, 20, SysCtlINotify.inotify_attrs[attrname])
    @staticmethod
    def create(attrname):
        """Return a SysCtlINotify for `attrname`, or None when the ctypes
        binding is unavailable or fails to initialize."""
        # FIXME: right now only supporting ctypes
        if ctypes is None:
            return None
        inotify_wrapper = _CtypesLibcINotifyWrapper()
        if not inotify_wrapper.init():
            return None
        return SysCtlINotify(attrname, inotify_wrapper)
    def get_val(self):
        """
        Gets attribute's value.
        @return: stored value.
        @rtype: int
        """
        oldv = ctypes.c_int(0)
        size = ctypes.c_int(ctypes.sizeof(oldv))
        # Read-only call: the new-value pointer and its size are NULL/0.
        self._inotify_wrapper._sysctl(self._attr, 3,
                                      ctypes.c_voidp(ctypes.addressof(oldv)),
                                      ctypes.addressof(size),
                                      None, 0)
        return oldv.value
    def set_val(self, nval):
        """
        Sets new attribute's value.
        @param nval: replaces current value by nval.
        @type nval: int
        """
        oldv = ctypes.c_int(0)
        sizeo = ctypes.c_int(ctypes.sizeof(oldv))
        newv = ctypes.c_int(nval)
        sizen = ctypes.c_int(ctypes.sizeof(newv))
        self._inotify_wrapper._sysctl(self._attr, 3,
                                      ctypes.c_voidp(ctypes.addressof(oldv)),
                                      ctypes.addressof(sizeo),
                                      ctypes.c_voidp(ctypes.addressof(newv)),
                                      ctypes.addressof(sizen))
    # Expose get_val/set_val as the read/write `value` attribute.
    value = property(get_val, set_val)
    def __repr__(self):
        return '<%s=%d>' % (self._attrname, self.get_val())
# Inotify's variables
#
# FIXME: currently these variables are only accessible when ctypes is used,
# otherwise there are set to None.
#
# read: myvar = max_queued_events.value
# update: max_queued_events.value = 42
#
for attrname in ('max_queued_events', 'max_user_instances', 'max_user_watches'):
globals()[attrname] = SysCtlINotify.create(attrname)
class EventsCodes:
    """
    Set of codes corresponding to each kind of events.
    Some of these flags are used to communicate with inotify, whereas
    the others are sent to userspace by inotify notifying some events.
    @cvar IN_ACCESS: File was accessed.
    @type IN_ACCESS: int
    @cvar IN_MODIFY: File was modified.
    @type IN_MODIFY: int
    @cvar IN_ATTRIB: Metadata changed.
    @type IN_ATTRIB: int
    @cvar IN_CLOSE_WRITE: Writtable file was closed.
    @type IN_CLOSE_WRITE: int
    @cvar IN_CLOSE_NOWRITE: Unwrittable file closed.
    @type IN_CLOSE_NOWRITE: int
    @cvar IN_OPEN: File was opened.
    @type IN_OPEN: int
    @cvar IN_MOVED_FROM: File was moved from X.
    @type IN_MOVED_FROM: int
    @cvar IN_MOVED_TO: File was moved to Y.
    @type IN_MOVED_TO: int
    @cvar IN_CREATE: Subfile was created.
    @type IN_CREATE: int
    @cvar IN_DELETE: Subfile was deleted.
    @type IN_DELETE: int
    @cvar IN_DELETE_SELF: Self (watched item itself) was deleted.
    @type IN_DELETE_SELF: int
    @cvar IN_MOVE_SELF: Self (watched item itself) was moved.
    @type IN_MOVE_SELF: int
    @cvar IN_UNMOUNT: Backing fs was unmounted.
    @type IN_UNMOUNT: int
    @cvar IN_Q_OVERFLOW: Event queued overflowed.
    @type IN_Q_OVERFLOW: int
    @cvar IN_IGNORED: File was ignored.
    @type IN_IGNORED: int
    @cvar IN_ONLYDIR: only watch the path if it is a directory (new
                      in kernel 2.6.15).
    @type IN_ONLYDIR: int
    @cvar IN_DONT_FOLLOW: don't follow a symlink (new in kernel 2.6.15).
                          Combined with IN_ONLYDIR we can make sure that
                          we don't watch the target of symlinks.
    @type IN_DONT_FOLLOW: int
    @cvar IN_EXCL_UNLINK: Events are not generated for children after they
                          have been unlinked from the watched directory.
                          (new in kernel 2.6.36).
    @type IN_EXCL_UNLINK: int
    @cvar IN_MASK_ADD: add to the mask of an already existing watch (new
                       in kernel 2.6.14).
    @type IN_MASK_ADD: int
    @cvar IN_ISDIR: Event occurred against dir.
    @type IN_ISDIR: int
    @cvar IN_ONESHOT: Only send event once.
    @type IN_ONESHOT: int
    @cvar ALL_EVENTS: Alias for considering all of the events.
    @type ALL_EVENTS: int
    """
    # The idea here is 'configuration-as-code' - this way, we get our nice class
    # constants, but we also get nice human-friendly text mappings to do lookups
    # against as well, for free:
    FLAG_COLLECTIONS = {'OP_FLAGS': {
        'IN_ACCESS'        : 0x00000001,  # File was accessed
        'IN_MODIFY'        : 0x00000002,  # File was modified
        'IN_ATTRIB'        : 0x00000004,  # Metadata changed
        'IN_CLOSE_WRITE'   : 0x00000008,  # Writable file was closed
        'IN_CLOSE_NOWRITE' : 0x00000010,  # Unwritable file closed
        'IN_OPEN'          : 0x00000020,  # File was opened
        'IN_MOVED_FROM'    : 0x00000040,  # File was moved from X
        'IN_MOVED_TO'      : 0x00000080,  # File was moved to Y
        'IN_CREATE'        : 0x00000100,  # Subfile was created
        'IN_DELETE'        : 0x00000200,  # Subfile was deleted
        'IN_DELETE_SELF'   : 0x00000400,  # Self (watched item itself)
                                          # was deleted
        'IN_MOVE_SELF'     : 0x00000800,  # Self (watched item itself) was moved
        },
                        'EVENT_FLAGS': {
        'IN_UNMOUNT'       : 0x00002000,  # Backing fs was unmounted
        'IN_Q_OVERFLOW'    : 0x00004000,  # Event queued overflowed
        'IN_IGNORED'       : 0x00008000,  # File was ignored
        },
                        'SPECIAL_FLAGS': {
        'IN_ONLYDIR'       : 0x01000000,  # only watch the path if it is a
                                          # directory
        'IN_DONT_FOLLOW'   : 0x02000000,  # don't follow a symlink
        'IN_EXCL_UNLINK'   : 0x04000000,  # exclude events on unlinked objects
        'IN_MASK_ADD'      : 0x20000000,  # add to the mask of an already
                                          # existing watch
        'IN_ISDIR'         : 0x40000000,  # event occurred against dir
        'IN_ONESHOT'       : 0x80000000,  # only send event once
        },
                        }
    def maskname(mask):
        """
        Returns the event name associated to mask. IN_ISDIR is appended to
        the result when appropriate. Note: only one event is returned, because
        only one event can be raised at a given time.
        @param mask: mask.
        @type mask: int
        @return: event name.
        @rtype: str
        @raise KeyError: if mask (minus an optional IN_ISDIR bit) is not a
                         single event value present in ALL_VALUES.
        """
        ms = mask
        name = '%s'
        if mask & IN_ISDIR:
            ms = mask - IN_ISDIR
            name = '%s|IN_ISDIR'
        return name % EventsCodes.ALL_VALUES[ms]
    # Old-style staticmethod binding, kept from the pre-decorator idiom.
    maskname = staticmethod(maskname)
# So let's now turn the configuration into code: walk FLAG_COLLECTIONS and
# materialize flat lookup tables plus module-level constants.
EventsCodes.ALL_FLAGS = {}   # name (str) -> mask value (int)
EventsCodes.ALL_VALUES = {}  # mask value (int) -> name (str), reverse map
for flagc, valc in EventsCodes.FLAG_COLLECTIONS.items():
    # Make the collections' members directly accessible through the
    # class dictionary (e.g. EventsCodes.OP_FLAGS).
    setattr(EventsCodes, flagc, valc)
    # Collect all the flags under a common umbrella
    EventsCodes.ALL_FLAGS.update(valc)
    # Make the individual masks accessible as 'constants' at globals() scope
    # (e.g. IN_CREATE) and masknames accessible by values.
    for name, val in valc.items():
        globals()[name] = val
        EventsCodes.ALL_VALUES[val] = name
# all 'normal' events: the OR of every operation flag (excludes the
# EVENT_FLAGS the kernel sends unsolicited and the SPECIAL_FLAGS modifiers).
ALL_EVENTS = reduce(lambda x, y: x | y, EventsCodes.OP_FLAGS.values())
EventsCodes.ALL_FLAGS['ALL_EVENTS'] = ALL_EVENTS
EventsCodes.ALL_VALUES[ALL_EVENTS] = 'ALL_EVENTS'
class _Event:
"""
Event structure, represent events raised by the system. This
is the base class and should be subclassed.
"""
def __init__(self, dict_):
"""
Attach attributes (contained in dict_) to self.
@param dict_: Set of attributes.
@type dict_: dictionary
"""
for tpl in dict_.items():
setattr(self, *tpl)
def __repr__(self):
"""
@return: Generic event string representation.
@rtype: str
"""
s = ''
for attr, value in sorted(self.__dict__.items(), key=lambda x: x[0]):
if attr.startswith('_'):
continue
if attr == 'mask':
value = hex(getattr(self, attr))
elif isinstance(value, str) and not value:
value = "''"
s += ' %s%s%s' % (output_format.field_name(attr),
output_format.punctuation('='),
output_format.field_value(value))
s = '%s%s%s %s' % (output_format.punctuation('<'),
output_format.class_name(self.__class__.__name__),
s,
output_format.punctuation('>'))
return s
def __str__(self):
return repr(self)
class _RawEvent(_Event):
    """
    Raw event, carrying only the information provided by the system;
    nothing is inferred.
    """
    def __init__(self, wd, mask, cookie, name):
        """
        @param wd: Watch Descriptor.
        @type wd: int
        @param mask: Bitmask of events.
        @type mask: int
        @param cookie: Cookie.
        @type cookie: int
        @param name: Basename of the file or directory against which the
                     event was raised in case where the watched directory
                     is the parent directory. None if the event was raised
                     on the watched item itself.
        @type name: string or None
        """
        # This object is immutable, so str(self) is computed once and
        # cached here.
        self._str = None
        # The kernel pads 'name' with trailing '\0' bytes; strip them.
        _Event.__init__(self, {'wd': wd,
                               'mask': mask,
                               'cookie': cookie,
                               'name': name.rstrip('\0')})
        log.debug(str(self))

    def __str__(self):
        # Lazily build and memoize the string representation.
        if self._str is None:
            self._str = _Event.__str__(self)
        return self._str
class Event(_Event):
    """
    This class contains all the useful informations about the observed
    event. However, the presence of each field is not guaranteed and
    depends on the type of event. In effect, some fields are irrelevant
    for some kind of event (for example 'cookie' is meaningless for
    IN_CREATE whereas it is mandatory for IN_MOVE_TO).

    The possible fields are:
      - wd (int): Watch Descriptor.
      - mask (int): Mask.
      - maskname (str): Readable event name.
      - path (str): path of the file or directory being watched.
      - name (str): Basename of the file or directory against which the
              event was raised in case where the watched directory
              is the parent directory. None if the event was raised
              on the watched item itself. This field is always provided
              even if the string is ''.
      - pathname (str): Concatenation of 'path' and 'name'.
      - src_pathname (str): Only present for IN_MOVED_TO events and only in
            the case where IN_MOVED_FROM events are watched too. Holds the
            source pathname from where pathname was moved from.
      - cookie (int): Cookie.
      - dir (bool): True if the event was raised against a directory.
    """
    def __init__(self, raw):
        """
        Concretely, this is the raw event plus inferred infos.
        """
        _Event.__init__(self, raw)
        self.maskname = EventsCodes.maskname(self.mask)
        if COMPATIBILITY_MODE:
            self.event_name = self.maskname
        try:
            name = self.name
            if name:
                self.pathname = os.path.abspath(os.path.join(self.path, name))
            else:
                self.pathname = os.path.abspath(self.path)
        except AttributeError as err:
            # Not necessarily an error: some events are perfectly valid
            # despite lacking 'path'/'name' attributes.
            log.debug(err)
class ProcessEventError(PyinotifyError):
    """
    ProcessEventError Exception. Raised on ProcessEvent error.
    """
    def __init__(self, err):
        """
        @param err: Exception error description.
        @type err: string
        """
        super().__init__(err)
class _ProcessEvent:
"""
Abstract processing event class.
"""
def __call__(self, event):
"""
To behave like a functor the object must be callable.
This method is a dispatch method. Its lookup order is:
1. process_MASKNAME method
2. process_FAMILY_NAME method
3. otherwise calls process_default
@param event: Event to be processed.
@type event: Event object
@return: By convention when used from the ProcessEvent class:
- Returning False or None (default value) means keep on
executing next chained functors (see chain.py example).
- Returning True instead means do not execute next
processing functions.
@rtype: bool
@raise ProcessEventError: Event object undispatchable,
unknown event.
"""
stripped_mask = event.mask - (event.mask & IN_ISDIR)
maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
if maskname is None:
raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)
# 1- look for process_MASKNAME
meth = getattr(self, 'process_' + maskname, None)
if meth is not None:
return meth(event)
# 2- look for process_FAMILY_NAME
meth = getattr(self, 'process_IN_' + maskname.split('_')[1], None)
if meth is not None:
return meth(event)
# 3- default call method process_default
return self.process_default(event)
def __repr__(self):
return '<%s>' % self.__class__.__name__
class _SysProcessEvent(_ProcessEvent):
    """
    Internal (system-level) event processor, run before any user handler.

    There are three kinds of processing according to each event:

      1. special handling (deletion from internal container, bug, ...).
      2. default treatment: which is applied to the majority of events.
      3. IN_ISDIR is never sent alone, it is piggybacked with a standard
         event, it is not processed like the other events, instead its
         value is captured and appropriately aggregated to the dst event.
    """
    def __init__(self, wm, notifier):
        """
        @param wm: Watch Manager.
        @type wm: WatchManager instance
        @param notifier: Notifier.
        @type notifier: Notifier instance
        """
        self._watch_manager = wm  # watch manager
        self._notifier = notifier  # notifier
        # Both maps carry a timestamp so stale entries can be expired by
        # cleanup(); they pair IN_MOVED_FROM with IN_MOVED_TO events.
        self._mv_cookie = {}  # {cookie(int): (src_path(str), date), ...}
        self._mv = {}  # {src_path(str): (dst_path(str), date), ...}

    def cleanup(self):
        """
        Cleanup (delete) old (>1mn) records contained in self._mv_cookie
        and self._mv.
        """
        date_cur_ = datetime.now()
        for seq in (self._mv_cookie, self._mv):
            # Iterate over a snapshot of the keys since entries are
            # deleted during iteration.
            for k in list(seq.keys()):
                if (date_cur_ - seq[k][1]) > timedelta(minutes=1):
                    log.debug('Cleanup: deleting entry %s', seq[k][0])
                    del seq[k]

    def process_IN_CREATE(self, raw_event):
        """
        If the event affects a directory and the auto_add flag of the
        targetted watch is set to True, a new watch is added on this
        new directory, with the same attribute values than those of
        this watch.
        """
        if raw_event.mask & IN_ISDIR:
            watch_ = self._watch_manager.get_watch(raw_event.wd)
            created_dir = os.path.join(watch_.path, raw_event.name)
            if watch_.auto_add and not watch_.exclude_filter(created_dir):
                addw = self._watch_manager.add_watch
                # The newly monitored directory inherits attributes from its
                # parent directory.
                addw_ret = addw(created_dir, watch_.mask,
                                proc_fun=watch_.proc_fun,
                                rec=False, auto_add=watch_.auto_add,
                                exclude_filter=watch_.exclude_filter)
                # Trick to handle mkdir -p /d1/d2/t3 where d1 is watched and
                # d2 and t3 (directory or file) are created.
                # Since the directory d2 is new, then everything inside it must
                # also be new.
                created_dir_wd = addw_ret.get(created_dir)
                if (created_dir_wd is not None) and (created_dir_wd > 0):
                    for name in os.listdir(created_dir):
                        inner = os.path.join(created_dir, name)
                        if self._watch_manager.get_wd(inner) is not None:
                            continue
                        # Generate (simulate) creation events for sub-
                        # directories and files.
                        if os.path.isfile(inner):
                            # symlinks are handled as files.
                            flags = IN_CREATE
                        elif os.path.isdir(inner):
                            flags = IN_CREATE | IN_ISDIR
                        else:
                            # This path should not be taken.
                            continue
                        rawevent = _RawEvent(created_dir_wd, flags, 0, name)
                        self._notifier.append_event(rawevent)
        return self.process_default(raw_event)

    def process_IN_MOVED_FROM(self, raw_event):
        """
        Map the cookie with the source path (+ date for cleaning).
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        path_ = watch_.path
        src_path = os.path.normpath(os.path.join(path_, raw_event.name))
        self._mv_cookie[raw_event.cookie] = (src_path, datetime.now())
        return self.process_default(raw_event, {'cookie': raw_event.cookie})

    def process_IN_MOVED_TO(self, raw_event):
        """
        Map the source path with the destination path (+ date for
        cleaning).
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        path_ = watch_.path
        dst_path = os.path.normpath(os.path.join(path_, raw_event.name))
        mv_ = self._mv_cookie.get(raw_event.cookie)
        to_append = {'cookie': raw_event.cookie}
        if mv_ is not None:
            self._mv[mv_[0]] = (dst_path, datetime.now())
            # Let's assume that IN_MOVED_FROM event is always queued before
            # that its associated (they share a common cookie) IN_MOVED_TO
            # event is queued itself. It is then possible in that scenario
            # to provide as additional information to the IN_MOVED_TO event
            # the original pathname of the moved file/directory.
            to_append['src_pathname'] = mv_[0]
        elif (raw_event.mask & IN_ISDIR and watch_.auto_add and
              not watch_.exclude_filter(dst_path)):
            # We got a directory that's "moved in" from an unknown source and
            # auto_add is enabled. Manually add watches to the inner subtrees.
            # The newly monitored directory inherits attributes from its
            # parent directory.
            self._watch_manager.add_watch(dst_path, watch_.mask,
                                          proc_fun=watch_.proc_fun,
                                          rec=True, auto_add=True,
                                          exclude_filter=watch_.exclude_filter)
        return self.process_default(raw_event, to_append)

    def process_IN_MOVE_SELF(self, raw_event):
        """
        STATUS: the following bug has been fixed in recent kernels (FIXME:
        which version ?). Now it raises IN_DELETE_SELF instead.

        Old kernels were bugged, this event raised when the watched item
        was moved, so we had to update its path, but under some circumstances
        it was impossible: if its parent directory and its destination
        directory weren't watched. The kernel (see include/linux/fsnotify.h)
        doesn't bring us enough information like the destination path of
        moved items.
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        src_path = watch_.path
        mv_ = self._mv.get(src_path)
        if mv_:
            dest_path = mv_[0]
            watch_.path = dest_path
            # add the separator to the source path to avoid overlapping
            # path issue when testing with startswith()
            src_path += os.path.sep
            src_path_len = len(src_path)
            # The next loop renames all watches with src_path as base path.
            # It seems that IN_MOVE_SELF does not provide IN_ISDIR information
            # therefore the next loop is iterated even if raw_event is a file.
            for w in self._watch_manager.watches.values():
                if w.path.startswith(src_path):
                    # Note that dest_path is a normalized path.
                    w.path = os.path.join(dest_path, w.path[src_path_len:])
        else:
            # No recorded IN_MOVED_FROM for this path: the watch's stored
            # path no longer matches reality and cannot be repaired.
            log.error("The pathname '%s' of this watch %s has probably changed "
                      "and couldn't be updated, so it cannot be trusted "
                      "anymore. To fix this error move directories/files only "
                      "between watched parents directories, in this case e.g. "
                      "put a watch on '%s'.",
                      watch_.path, watch_,
                      os.path.normpath(os.path.join(watch_.path,
                                                    os.path.pardir)))
            if not watch_.path.endswith('-unknown-path'):
                watch_.path += '-unknown-path'
        return self.process_default(raw_event)

    def process_IN_Q_OVERFLOW(self, raw_event):
        """
        Only signal an overflow, most of the common flags are irrelevant
        for this event (path, wd, name).
        """
        return Event({'mask': raw_event.mask})

    def process_IN_IGNORED(self, raw_event):
        """
        The watch descriptor raised by this event is now ignored (forever),
        it can be safely deleted from the watch manager dictionary.
        After this event we can be sure that neither the event queue nor
        the system will raise an event associated to this wd again.
        """
        event_ = self.process_default(raw_event)
        self._watch_manager.del_watch(raw_event.wd)
        return event_

    def process_default(self, raw_event, to_append=None):
        """
        Common handling for the following events:

        IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_WRITE, IN_CLOSE_NOWRITE,
        IN_OPEN, IN_DELETE, IN_DELETE_SELF, IN_UNMOUNT.
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        if raw_event.mask & (IN_DELETE_SELF | IN_MOVE_SELF):
            # Unfortunately this information is not provided by the kernel
            dir_ = watch_.dir
        else:
            dir_ = bool(raw_event.mask & IN_ISDIR)
        dict_ = {'wd': raw_event.wd,
                 'mask': raw_event.mask,
                 'path': watch_.path,
                 'name': raw_event.name,
                 'dir': dir_}
        if COMPATIBILITY_MODE:
            dict_['is_dir'] = dir_
        if to_append is not None:
            dict_.update(to_append)
        return Event(dict_)
class ProcessEvent(_ProcessEvent):
    """
    Process events objects, can be specialized via subclassing, thus its
    behavior can be overridden:

    Note: you should not override __init__ in your subclass instead define
    a my_init() method, this method will be called automatically from the
    constructor of this class with its optional parameters.

      1. Provide specialized individual methods, e.g. process_IN_DELETE for
         processing a precise type of event (e.g. IN_DELETE in this case).
      2. Or/and provide methods for processing events by 'family', e.g.
         process_IN_CLOSE method will process both IN_CLOSE_WRITE and
         IN_CLOSE_NOWRITE events (if process_IN_CLOSE_WRITE and
         process_IN_CLOSE_NOWRITE aren't defined though).
      3. Or/and override process_default for catching and processing all
         the remaining types of events.
    """
    pevent = None

    def __init__(self, pevent=None, **kargs):
        """
        Enable chaining of ProcessEvent instances.

        @param pevent: Optional callable object, will be called on event
                       processing (before self).
        @type pevent: callable
        @param kargs: This constructor is implemented as a template method
                      delegating its optional keyword arguments to the
                      method my_init().
        @type kargs: dict
        """
        self.pevent = pevent
        self.my_init(**kargs)

    def my_init(self, **kargs):
        """
        Initialization hook called from ProcessEvent.__init__(). Empty by
        default; override it in your subclass when instance-specific setup
        is needed. All the keyword arguments passed to
        ProcessEvent.__init__() are forwarded to this method. Beware you
        MUST pass keyword arguments though.

        @param kargs: optional delegated arguments from __init__().
        @type kargs: dict
        """
        pass

    def __call__(self, event):
        # A chained functor that returns a truthy value explicitly asks to
        # stop the chain; only when it returns None/False does the local
        # processing method run.
        if self.pevent is not None and self.pevent(event):
            return None
        return _ProcessEvent.__call__(self, event)

    def nested_pevent(self):
        return self.pevent

    def process_IN_Q_OVERFLOW(self, event):
        """
        By default this method only reports warning messages, you can
        override it by subclassing ProcessEvent and implementing your own
        process_IN_Q_OVERFLOW method. The actions you can take on receiving
        this event is either to update the variable max_queued_events in order
        to handle more simultaneous events or to modify your code in order to
        accomplish a better filtering diminishing the number of raised events.
        Because this method is defined, IN_Q_OVERFLOW will never get
        transmitted as arguments to process_default calls.

        @param event: IN_Q_OVERFLOW event.
        @type event: dict
        """
        log.warning('Event queue overflowed.')

    def process_default(self, event):
        """
        Default processing event method. By default does nothing. Subclass
        ProcessEvent and redefine this method in order to modify its behavior.

        @param event: Event to be processed. Can be of any type of events but
                      IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
        @type event: Event instance
        """
        pass
class PrintAllEvents(ProcessEvent):
    """
    Dummy class used to print events strings representations. For instance this
    class is used from command line to print all received events to stdout.
    """
    def my_init(self, out=None):
        """
        @param out: Where events will be written.
        @type out: Object providing a valid file object interface.
        """
        # Fall back on stdout when no output object is supplied.
        self._out = sys.stdout if out is None else out

    def process_default(self, event):
        """
        Writes event string representation to file object provided to
        my_init().

        @param event: Event to be processed. Can be of any type of events but
                      IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
        @type event: Event instance
        """
        out = self._out
        out.write(str(event))
        out.write('\n')
        out.flush()
class ChainIfTrue(ProcessEvent):
    """
    Makes conditional chaining depending on the result of the nested
    processing instance.
    """
    def my_init(self, func):
        """
        Method automatically called from base class constructor.
        Stores the predicate deciding whether chaining continues.
        """
        self._func = func

    def process_default(self, event):
        # Returning True stops the chain, hence the negation: chaining
        # continues only when func(event) evaluates true.
        return not self._func(event)
class Stats(ProcessEvent):
    """
    Compute and display trivial statistics about processed events.
    """
    def my_init(self):
        """
        Method automatically called from base class constructor.
        """
        self._start_time = time.time()
        # Maps event names (str) to their occurrence count (int).
        self._stats = {}
        # Guards _stats: process_default may run in a notifier thread
        # while __repr__/__str__ read the stats from another thread.
        self._stats_lock = threading.Lock()

    def process_default(self, event):
        """
        Processes |event|: increments the counter of every event name in
        its maskname (e.g. 'IN_CREATE|IN_ISDIR' counts both names).
        """
        with self._stats_lock:
            for event_name in event.maskname.split('|'):
                self._stats[event_name] = self._stats.get(event_name, 0) + 1

    def _stats_copy(self):
        # Snapshot under the lock so readers never iterate a dict being
        # mutated by process_default.
        with self._stats_lock:
            return self._stats.copy()

    def __repr__(self):
        stats = self._stats_copy()

        # Integer division (//) replaces the Python-2-era '/' which only
        # produced correct output because %d truncates floats.
        elapsed = int(time.time() - self._start_time)
        if elapsed < 60:
            elapsed_str = '%dsec' % elapsed
        elif 60 <= elapsed < 3600:
            elapsed_str = '%dmn%dsec' % (elapsed // 60, elapsed % 60)
        elif 3600 <= elapsed < 86400:
            elapsed_str = '%dh%dmn' % (elapsed // 3600, (elapsed % 3600) // 60)
        else:
            elapsed_str = '%dd%dh' % (elapsed // 86400,
                                      (elapsed % 86400) // 3600)
        stats['ElapsedTime'] = elapsed_str

        parts = []
        for ev, value in sorted(stats.items(), key=lambda x: x[0]):
            parts.append(' %s=%s' % (output_format.field_name(ev),
                                     output_format.field_value(value)))
        return '<%s%s >' % (output_format.class_name(self.__class__.__name__),
                            ''.join(parts))

    def dump(self, filename):
        """
        Dumps statistics.

        @param filename: filename where stats will be dumped, filename is
                         created and must not exist prior to this call.
        @type filename: string
        """
        # O_EXCL|O_NOFOLLOW: refuse to clobber an existing file or to
        # follow a symlink (mitigates symlink attacks on predictable names).
        flags = os.O_WRONLY | os.O_CREAT | os.O_NOFOLLOW | os.O_EXCL
        fd = os.open(filename, flags, 0o0600)
        try:
            os.write(fd, bytes(self.__str__(), locale.getpreferredencoding()))
        finally:
            # Close the descriptor even if the write fails (original code
            # leaked it on error).
            os.close(fd)

    def __str__(self, scale=45):
        stats = self._stats_copy()
        if not stats:
            return ''

        # Scale the histogram so the most frequent event spans 'scale'
        # '@' characters.
        m = max(stats.values())
        unity = scale / m
        fmt = '%%-26s%%-%ds%%s' % (len(output_format.field_value('@' * scale))
                                   + 1)
        def func(x):
            return fmt % (output_format.field_name(x[0]),
                          output_format.field_value('@' * int(x[1] * unity)),
                          output_format.simple('%d' % x[1], 'yellow'))
        return '\n'.join(map(func, sorted(stats.items(), key=lambda x: x[0])))
class NotifierError(PyinotifyError):
    """
    Notifier Exception. Raised on Notifier error.
    """
    def __init__(self, err):
        """
        @param err: Exception string's description.
        @type err: string
        """
        super().__init__(err)
class Notifier:
    """
    Read notifications, process events.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None):
        """
        Initialization. read_freq, threshold and timeout parameters are used
        when looping.

        @param watch_manager: Watch Manager.
        @type watch_manager: WatchManager instance
        @param default_proc_fun: Default processing method. If None, a new
                                 instance of PrintAllEvents will be assigned.
        @type default_proc_fun: instance of ProcessEvent
        @param read_freq: if read_freq == 0, events are read asap,
                          if read_freq is > 0, this thread sleeps
                          max(0, read_freq - timeout) seconds. But if
                          timeout is None it may be different because
                          poll is blocking waiting for something to read.
        @type read_freq: int
        @param threshold: File descriptor will be read only if the accumulated
                          size to read becomes >= threshold. If != 0, you likely
                          want to use it in combination with an appropriate
                          value for read_freq because without that you would
                          keep looping without really reading anything and that
                          until the amount of events to read is >= threshold.
                          At least with read_freq set you might sleep.
        @type threshold: int
        @param timeout:
            http://docs.python.org/lib/poll-objects.html#poll-objects
        @type timeout: int
        """
        # Watch Manager instance
        self._watch_manager = watch_manager
        # File descriptor
        self._fd = self._watch_manager.get_fd()
        # Poll object and registration
        self._pollobj = select.poll()
        self._pollobj.register(self._fd, select.POLLIN)
        # This pipe is correctly initialized and used by ThreadedNotifier
        self._pipe = (-1, -1)
        # Event queue
        self._eventq = deque()
        # System processing functor, common to all events
        self._sys_proc_fun = _SysProcessEvent(self._watch_manager, self)
        # Default processing method
        self._default_proc_fun = default_proc_fun
        if default_proc_fun is None:
            self._default_proc_fun = PrintAllEvents()
        # Loop parameters
        self._read_freq = read_freq
        self._threshold = threshold
        self._timeout = timeout
        # Coalesce events option
        self._coalesce = False
        # set of str(raw_event), only used when coalesce option is True
        self._eventset = set()

    def append_event(self, event):
        """
        Append a raw event to the event queue.

        @param event: An event.
        @type event: _RawEvent instance.
        """
        self._eventq.append(event)

    def proc_fun(self):
        # Accessor for the default processing functor.
        return self._default_proc_fun

    def coalesce_events(self, coalesce=True):
        """
        Coalescing events. Events are usually processed by batches, their size
        depends on various factors. Thus, before processing them, events
        received from inotify are aggregated in a fifo queue. If this
        coalescing option is enabled events are filtered based on their
        unicity, only unique events are enqueued, doublons are discarded.
        An event is unique when the combination of its fields (wd, mask,
        cookie, name) is unique among events of a same batch. After a batch of
        events is processed any events is accepted again. By default this
        option is disabled, you have to explicitly call this function to turn
        it on.

        @param coalesce: Optional new coalescing value. True by default.
        @type coalesce: Bool
        """
        self._coalesce = coalesce
        if not coalesce:
            self._eventset.clear()

    def check_events(self, timeout=None):
        """
        Check for new events available to read, blocks up to timeout
        milliseconds.

        @param timeout: If specified it overrides the corresponding instance
                        attribute _timeout.
        @type timeout: int
        @return: New events to read.
        @rtype: bool
        """
        while True:
            try:
                # blocks up to 'timeout' milliseconds
                if timeout is None:
                    timeout = self._timeout
                ret = self._pollobj.poll(timeout)
            except select.error as err:
                if err.args[0] == errno.EINTR:
                    continue  # interrupted, retry
                else:
                    raise
            else:
                break

        # A readable pipe fd means ThreadedNotifier.stop() woke us up,
        # not that inotify events are pending.
        if not ret or (self._pipe[0] == ret[0][0]):
            return False
        # only one fd is polled
        return ret[0][1] & select.POLLIN

    def read_events(self):
        """
        Read events from device, build _RawEvents, and enqueue them.
        """
        buf_ = array.array('i', [0])
        # get event queue size (number of pending bytes on the inotify fd)
        if fcntl.ioctl(self._fd, termios.FIONREAD, buf_, 1) == -1:
            return
        queue_size = buf_[0]
        if queue_size < self._threshold:
            log.debug('(fd: %d) %d bytes available to read but threshold is '
                      'fixed to %d bytes', self._fd, queue_size,
                      self._threshold)
            return

        try:
            # Read content from file
            r = os.read(self._fd, queue_size)
        except Exception as msg:
            raise NotifierError(msg)
        log.debug('Event queue size: %d', queue_size)
        rsum = 0  # counter
        # Each record is a fixed 16-byte header (wd, mask, cookie, len)
        # followed by 'len' bytes of NUL-padded name.
        while rsum < queue_size:
            s_size = 16
            # Retrieve wd, mask, cookie and fname_len
            wd, mask, cookie, fname_len = struct.unpack('iIII',
                                                        r[rsum:rsum+s_size])
            # Retrieve name
            bname, = struct.unpack('%ds' % fname_len,
                                   r[rsum + s_size:rsum + s_size + fname_len])
            # FIXME: should we explicitly call sys.getdefaultencoding() here ??
            uname = bname.decode()
            rawevent = _RawEvent(wd, mask, cookie, uname)
            if self._coalesce:
                # Only enqueue new (unique) events.
                raweventstr = str(rawevent)
                if raweventstr not in self._eventset:
                    self._eventset.add(raweventstr)
                    self._eventq.append(rawevent)
            else:
                self._eventq.append(rawevent)
            rsum += s_size + fname_len

    def process_events(self):
        """
        Routine for processing events from queue by calling their
        associated processing method (an instance of ProcessEvent).
        It also does internal processings, to keep the system updated.
        """
        while self._eventq:
            raw_event = self._eventq.popleft()  # pop next event
            watch_ = self._watch_manager.get_watch(raw_event.wd)
            if (watch_ is None) and not (raw_event.mask & IN_Q_OVERFLOW):
                if not (raw_event.mask & IN_IGNORED):
                    # Not really sure how we ended up here, nor how we should
                    # handle these types of events and if it is appropriate to
                    # completely skip them (like we are doing here).
                    log.warning("Unable to retrieve Watch object associated to %s",
                                repr(raw_event))
                continue
            revent = self._sys_proc_fun(raw_event)  # system processings
            if watch_ and watch_.proc_fun:
                watch_.proc_fun(revent)  # user processings
            else:
                self._default_proc_fun(revent)
        self._sys_proc_fun.cleanup()  # remove olds MOVED_* events records
        if self._coalesce:
            # New batch: accept previously-seen events again.
            self._eventset.clear()

    def __daemonize(self, pid_file=None, stdin=os.devnull, stdout=os.devnull,
                    stderr=os.devnull):
        """
        Turn the current process into a daemon (double fork, new session,
        streams redirected) and optionally record its pid.

        pid_file: file where the pid will be written. If pid_file=None the pid
                  is written to /var/run/<sys.argv[0]|pyinotify>.pid, if
                  pid_file=False no pid_file is written.
        stdin, stdout, stderr: files associated to common streams.
        """
        if pid_file is None:
            dirname = '/var/run/'
            basename = os.path.basename(sys.argv[0]) or 'pyinotify'
            pid_file = os.path.join(dirname, basename + '.pid')

        if pid_file != False and os.path.lexists(pid_file):
            err = 'Cannot daemonize: pid file %s already exists.' % pid_file
            raise NotifierError(err)

        def fork_daemon():
            # Adapted from Chad J. Schroeder's recipe
            # @see http://code.activestate.com/recipes/278731/
            pid = os.fork()
            if (pid == 0):
                # child: become session leader, then fork again so the
                # surviving grandchild cannot reacquire a controlling tty
                os.setsid()
                pid = os.fork()
                if (pid == 0):
                    # grandchild: the actual daemon
                    os.chdir('/')
                    os.umask(0o022)
                else:
                    # first child exits
                    os._exit(0)
            else:
                # original parent exits
                os._exit(0)

            # Redirect the standard streams onto the requested files.
            fd_inp = os.open(stdin, os.O_RDONLY)
            os.dup2(fd_inp, 0)
            fd_out = os.open(stdout, os.O_WRONLY|os.O_CREAT, 0o0600)
            os.dup2(fd_out, 1)
            fd_err = os.open(stderr, os.O_WRONLY|os.O_CREAT, 0o0600)
            os.dup2(fd_err, 2)

        # Detach task
        fork_daemon()

        # Write pid
        if pid_file != False:
            flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
            fd_pid = os.open(pid_file, flags, 0o0600)
            os.write(fd_pid, bytes(str(os.getpid()) + '\n',
                                   locale.getpreferredencoding()))
            os.close(fd_pid)
            # Register unlink function
            atexit.register(lambda : os.unlink(pid_file))

    def _sleep(self, ref_time):
        # Only consider sleeping if read_freq is > 0
        if self._read_freq > 0:
            cur_time = time.time()
            # Sleep only for the part of read_freq not already spent
            # processing since ref_time.
            sleep_amount = self._read_freq - (cur_time - ref_time)
            if sleep_amount > 0:
                log.debug('Now sleeping %d seconds', sleep_amount)
                time.sleep(sleep_amount)

    def loop(self, callback=None, daemonize=False, **args):
        """
        Events are read only one time every min(read_freq, timeout)
        seconds at best and only if the size to read is >= threshold.
        After this method returns it must not be called again for the same
        instance.

        @param callback: Functor called after each event processing iteration.
                         Expects to receive the notifier object (self) as first
                         parameter. If this function returns True the loop is
                         immediately terminated otherwise the loop method keeps
                         looping.
        @type callback: callable object or function
        @param daemonize: This thread is daemonized if set to True.
        @type daemonize: boolean
        @param args: Optional and relevant only if daemonize is True. Remaining
                     keyworded arguments are directly passed to daemonize see
                     __daemonize() method. If pid_file=None or is set to a
                     pathname the caller must ensure the file does not exist
                     before this method is called otherwise an exception
                     pyinotify.NotifierError will be raised. If pid_file=False
                     it is still daemonized but the pid is not written in any
                     file.
        @type args: various
        """
        if daemonize:
            self.__daemonize(**args)

        # Read and process events forever
        while 1:
            try:
                self.process_events()
                if (callback is not None) and (callback(self) is True):
                    break
                ref_time = time.time()
                # check_events is blocking
                if self.check_events():
                    self._sleep(ref_time)
                    self.read_events()
            except KeyboardInterrupt:
                # Stop monitoring if sigint is caught (Control-C).
                log.debug('Pyinotify stops monitoring.')
                break
        # Close internals
        self.stop()

    def stop(self):
        """
        Close inotify's instance (close its file descriptor).
        It destroys all existing watches, pending events,...
        This method is automatically called at the end of loop().
        """
        self._pollobj.unregister(self._fd)
        os.close(self._fd)
class ThreadedNotifier(threading.Thread, Notifier):
    """
    This notifier inherits from threading.Thread for instantiating a separate
    thread, and also inherits from Notifier, because it is a threaded notifier.

    Note that every functionality provided by this class is also provided
    through Notifier class. Moreover Notifier should be considered first
    because it is not threaded and could be easily daemonized.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None):
        """
        Initialization, initialize base classes. read_freq, threshold and
        timeout parameters are used when looping.

        @param watch_manager: Watch Manager.
        @type watch_manager: WatchManager instance
        @param default_proc_fun: Default processing method. See base class.
        @type default_proc_fun: instance of ProcessEvent
        @param read_freq: if read_freq == 0, events are read asap,
                          if read_freq is > 0, this thread sleeps
                          max(0, read_freq - timeout) seconds.
        @type read_freq: int
        @param threshold: File descriptor will be read only if the accumulated
                          size to read becomes >= threshold. If != 0, you likely
                          want to use it in combination with an appropriate
                          value set for read_freq because without that you would
                          keep looping without really reading anything and that
                          until the amount of events to read is >= threshold. At
                          least with read_freq you might sleep.
        @type threshold: int
        @param timeout:
            see http://docs.python.org/lib/poll-objects.html#poll-objects
        @type timeout: int
        """
        # Init threading base class
        threading.Thread.__init__(self)
        # Stop condition
        self._stop_event = threading.Event()
        # Init Notifier base class
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        # Create a new pipe used for thread termination: writing to it
        # wakes up the poll() so the loop can notice _stop_event.
        self._pipe = os.pipe()
        self._pollobj.register(self._pipe[0], select.POLLIN)

    def stop(self):
        """
        Stop notifier's loop. Stop notification. Join the thread.
        """
        self._stop_event.set()
        # Wake up the blocking poll() in check_events() so the thread can
        # observe the stop condition without waiting for an inotify event.
        os.write(self._pipe[1], b'stop')
        threading.Thread.join(self)
        Notifier.stop(self)
        self._pollobj.unregister(self._pipe[0])
        os.close(self._pipe[0])
        os.close(self._pipe[1])

    def loop(self):
        """
        Thread's main loop. Not meant to be called by user directly.
        Call inherited start() method instead.

        Events are read only one time every min(read_freq, timeout)
        seconds at best and only if the size of events to read is >= threshold.
        """
        # When the loop must be terminated .stop() is called, 'stop'
        # is written to pipe fd so poll() returns and .check_events()
        # returns False which makes the while's stop condition
        # ._stop_event.isSet() evaluate and put an end to the thread's
        # execution.
        while not self._stop_event.isSet():
            self.process_events()
            ref_time = time.time()
            if self.check_events():
                self._sleep(ref_time)
                self.read_events()

    def run(self):
        """
        Start thread's loop: read and process events until the method
        stop() is called.
        Never call this method directly, instead call the start() method
        inherited from threading.Thread, which then will call run() in
        its turn.
        """
        self.loop()
class AsyncNotifier(asyncore.file_dispatcher, Notifier):
    """
    This notifier inherits from asyncore.file_dispatcher in order to be able to
    use pyinotify along with the asyncore framework.

    NOTE(review): the asyncore module was removed from the standard library
    in Python 3.12 (PEP 594); this class cannot be used on 3.12+.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None, channel_map=None):
        """
        Initializes the async notifier. The only additional parameter is
        'channel_map' which is the optional asyncore private map. See
        Notifier class for the meaning of the others parameters.
        """
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        # Register the inotify fd with asyncore so handle_read() is called
        # whenever it becomes readable.
        asyncore.file_dispatcher.__init__(self, self._fd, channel_map)
    def handle_read(self):
        """
        When asyncore tells us we can read from the fd, we proceed processing
        events. This method can be overridden for handling a notification
        differently.
        """
        self.read_events()
        self.process_events()
class TornadoAsyncNotifier(Notifier):
    """
    Tornado ioloop adapter: registers the inotify fd with a Tornado IOLoop
    and processes events from its read handler.
    """
    def __init__(self, watch_manager, ioloop, callback=None,
                 default_proc_fun=None, read_freq=0, threshold=0, timeout=None,
                 channel_map=None):
        """
        Note that if later you must call ioloop.close() be sure to let the
        default parameter to all_fds=False.
        See example tornado_notifier.py for an example using this notifier.
        @param ioloop: Tornado's IO loop.
        @type ioloop: tornado.ioloop.IOLoop instance.
        @param callback: Functor called at the end of each call to handle_read
                         (IOLoop's read handler). Expects to receive the
                         notifier object (self) as single parameter.
        @type callback: callable object or function
        """
        self.io_loop = ioloop
        self.handle_read_callback = callback
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        # Have the ioloop invoke handle_read whenever the inotify fd is
        # readable.
        ioloop.add_handler(self._fd, self.handle_read, ioloop.READ)
    def handle_read(self, *args, **kwargs):
        """
        See comment in AsyncNotifier.
        """
        self.read_events()
        self.process_events()
        if self.handle_read_callback is not None:
            self.handle_read_callback(self)
class Watch:
    """
    A single watched item: one file or directory registered with inotify.
    """
    __slots__ = ('wd', 'path', 'mask', 'proc_fun', 'auto_add',
                 'exclude_filter', 'dir')

    def __init__(self, wd, path, mask, proc_fun, auto_add, exclude_filter):
        """
        Store the attributes describing this watch.
        @param wd: Watch descriptor.
        @type wd: int
        @param path: Path of the file or directory being watched.
        @type path: str
        @param mask: Event bitmask.
        @type mask: int
        @param proc_fun: Processing callable object.
        @param auto_add: Automatically add watches on new directories.
        @type auto_add: bool
        @param exclude_filter: Boolean function, used to exclude new
                               directories from being automatically watched.
                               See WatchManager.__init__
        @type exclude_filter: callable object
        """
        self.wd = wd
        self.path = path
        self.mask = mask
        self.proc_fun = proc_fun
        self.auto_add = auto_add
        self.exclude_filter = exclude_filter
        # Remember whether the watched path is a directory; checked once at
        # creation time.
        self.dir = os.path.isdir(path)

    def __repr__(self):
        """
        @return: String representation.
        @rtype: str
        """
        fields = []
        for attr in self.__slots__:
            if attr.startswith('_'):
                continue
            fields.append('%s%s%s' % (output_format.field_name(attr),
                                      output_format.punctuation('='),
                                      output_format.field_value(getattr(self,
                                                                        attr))))
        return '%s%s %s %s' % (output_format.punctuation('<'),
                               output_format.class_name(self.__class__.__name__),
                               ' '.join(fields),
                               output_format.punctuation('>'))
class ExcludeFilter:
    """
    Callable exclusion filter: tells whether a path matches any of the
    configured regular expressions and therefore must be excluded.
    """
    def __init__(self, arg_lst):
        """
        Examples:
          ef1 = ExcludeFilter(["^/etc/rc.*", "^/etc/hostname"])
          ef2 = ExcludeFilter("/my/path/exclude.lst")
          Where exclude.lst contains:
          ^/etc/rc.*
          ^/etc/hostname

        Note: it is not possible to exclude a file if its encapsulating
        directory is itself watched. See this issue for more details
        https://github.com/seb-m/pyinotify/issues/31

        @param arg_lst: is either a list of patterns or a filename from which
                        patterns will be loaded.
        @type arg_lst: list of str or str
        """
        if isinstance(arg_lst, str):
            patterns = self._load_patterns_from_file(arg_lst)
        elif isinstance(arg_lst, list):
            patterns = arg_lst
        else:
            raise TypeError
        self._lregex = [re.compile(pattern, re.UNICODE)
                        for pattern in patterns]

    def _load_patterns_from_file(self, filename):
        # One pattern per line; blank lines and '#' comments are skipped.
        with open(filename, 'r') as file_obj:
            stripped = (line.strip() for line in file_obj.readlines())
            return [p for p in stripped if p and not p.startswith('#')]

    def _match(self, regex, path):
        return regex.match(path) is not None

    def __call__(self, path):
        """
        @param path: Path to match against provided regexps.
        @type path: str
        @return: Return True if path has been matched and should
                 be excluded, False otherwise.
        @rtype: bool
        """
        return any(self._match(regex, path) for regex in self._lregex)
class WatchManagerError(Exception):
    """
    Exception raised when a watch operation performed by a WatchManager
    fails.
    """
    def __init__(self, msg, wmd):
        """
        @param msg: Exception string's description.
        @type msg: string
        @param wmd: This dictionary contains the wd assigned to paths of the
                    same call for which watches were successfully added.
        @type wmd: dict
        """
        Exception.__init__(self, msg)
        # Keep the partial results so the caller can inspect which watches
        # did get added before the failure.
        self.wmd = wmd
class WatchManager:
    """
    Provide operations for watching files and directories. Its internal
    dictionary is used to reference watched items. When used inside
    threaded code, one must instanciate as many WatchManager instances as
    there are ThreadedNotifier instances.
    """
    def __init__(self, exclude_filter=lambda path: False):
        """
        Initialization: init inotify, init watch manager dictionary.
        Raise OSError if initialization fails, raise InotifyBindingNotFoundError
        if no inotify binding was found (through ctypes or from direct access to
        syscalls).
        @param exclude_filter: boolean function, returns True if current
                               path must be excluded from being watched.
                               Convenient for providing a common exclusion
                               filter for every call to add_watch.
        @type exclude_filter: callable object
        """
        self._exclude_filter = exclude_filter
        self._wmd = {}  # watch dict key: watch descriptor, value: watch
        self._inotify_wrapper = INotifyWrapper.create()
        if self._inotify_wrapper is None:
            raise InotifyBindingNotFoundError()
        self._fd = self._inotify_wrapper.inotify_init() # file descriptor
        if self._fd < 0:
            err = 'Cannot initialize new instance of inotify, %s'
            raise OSError(err % self._inotify_wrapper.str_errno())

    def close(self):
        """
        Close inotify's file descriptor, this action will also automatically
        remove (i.e. stop watching) all its associated watch descriptors.
        After a call to this method the WatchManager's instance become useless
        and cannot be reused, a new instance must then be instanciated. It
        makes sense to call this method in few situations for instance if
        several independant WatchManager must be instanciated or if all watches
        must be removed and no other watches need to be added.
        """
        os.close(self._fd)

    def get_fd(self):
        """
        Return assigned inotify's file descriptor.
        @return: File descriptor.
        @rtype: int
        """
        return self._fd

    def get_watch(self, wd):
        """
        Get watch from provided watch descriptor wd.
        @param wd: Watch descriptor.
        @type wd: int
        """
        return self._wmd.get(wd)

    def del_watch(self, wd):
        """
        Remove watch entry associated to watch descriptor wd.
        @param wd: Watch descriptor.
        @type wd: int
        """
        try:
            del self._wmd[wd]
        except KeyError as err:
            log.error('Cannot delete unknown watch descriptor %s' % str(err))

    @property
    def watches(self):
        """
        Get a reference on the internal watch manager dictionary.
        @return: Internal watch manager dictionary.
        @rtype: dict
        """
        return self._wmd

    def __format_path(self, path):
        """
        Format path to its internal (stored in watch manager) representation.
        """
        # path must be a unicode string (str) and is just normalized.
        return os.path.normpath(path)

    def __add_watch(self, path, mask, proc_fun, auto_add, exclude_filter):
        """
        Add a watch on path, build a Watch object and insert it in the
        watch manager dictionary. Return the wd value.
        """
        path = self.__format_path(path)
        # auto_add requires IN_CREATE so new subdirectories are noticed.
        if auto_add and not mask & IN_CREATE:
            mask |= IN_CREATE
        wd = self._inotify_wrapper.inotify_add_watch(self._fd, path, mask)
        if wd < 0:
            return wd
        watch = Watch(wd=wd, path=path, mask=mask, proc_fun=proc_fun,
                      auto_add=auto_add, exclude_filter=exclude_filter)
        # wd are _always_ indexed with their original unicode paths in wmd.
        self._wmd[wd] = watch
        log.debug('New %s', watch)
        return wd

    def __glob(self, path, do_glob):
        if do_glob:
            return glob.iglob(path)
        else:
            return [path]

    def add_watch(self, path, mask, proc_fun=None, rec=False,
                  auto_add=False, do_glob=False, quiet=True,
                  exclude_filter=None):
        """
        Add watch(s) on the provided |path|(s) with associated |mask| flag
        value and optionally with a processing |proc_fun| function and
        recursive flag |rec| set to True.
        All |path| components _must_ be str (i.e. unicode) objects.
        If |path| is already watched it is ignored, but if it is called with
        option rec=True a watch is put on each one of its not-watched
        subdirectory.
        @param path: Path to watch, the path can either be a file or a
                     directory. Also accepts a sequence (list) of paths.
        @type path: string or list of strings
        @param mask: Bitmask of events.
        @type mask: int
        @param proc_fun: Processing object.
        @type proc_fun: function or ProcessEvent instance or instance of
                        one of its subclasses or callable object.
        @param rec: Recursively add watches from path on all its
                    subdirectories, set to False by default (doesn't
                    follows symlinks in any case).
        @type rec: bool
        @param auto_add: Automatically add watches on newly created
                         directories in watched parent |path| directory.
                         If |auto_add| is True, IN_CREATE is ored with |mask|
                         when the watch is added.
        @type auto_add: bool
        @param do_glob: Do globbing on pathname (see standard globbing
                        module for more informations).
        @type do_glob: bool
        @param quiet: if False raises a WatchManagerError exception on
                      error. See example not_quiet.py.
        @type quiet: bool
        @param exclude_filter: predicate (boolean function), which returns
                               True if the current path must be excluded
                               from being watched. This argument has
                               precedence over exclude_filter passed to
                               the class' constructor.
        @type exclude_filter: callable object
        @return: dict of paths associated to watch descriptors. A wd value
                 is positive if the watch was added sucessfully, otherwise
                 the value is negative. If the path was invalid or was already
                 watched it is not included into this returned dictionary.
        @rtype: dict of {str: int}
        """
        ret_ = {} # return {path: wd, ...}
        if exclude_filter is None:
            exclude_filter = self._exclude_filter
        # normalize args as list elements
        for npath in self.__format_param(path):
            # Require that path be a unicode string
            if not isinstance(npath, str):
                # Key on the offending element (npath), not on the whole
                # |path| argument: when |path| is a list the original
                # ret_[path] raised TypeError (lists are unhashable).
                ret_[npath] = -3
                continue
            # unix pathname pattern expansion
            for apath in self.__glob(npath, do_glob):
                # recursively list subdirs according to rec param
                for rpath in self.__walk_rec(apath, rec):
                    if not exclude_filter(rpath):
                        wd = ret_[rpath] = self.__add_watch(rpath, mask,
                                                            proc_fun,
                                                            auto_add,
                                                            exclude_filter)
                        if wd < 0:
                            err = ('add_watch: cannot watch %s WD=%d, %s' % \
                                   (rpath, wd,
                                    self._inotify_wrapper.str_errno()))
                            if quiet:
                                log.error(err)
                            else:
                                raise WatchManagerError(err, ret_)
                    else:
                        # Let's say -2 means 'explicitely excluded
                        # from watching'.
                        ret_[rpath] = -2
        return ret_

    def __get_sub_rec(self, lpath):
        """
        Get every wd from self._wmd if its path is under the path of
        one (at least) of those in lpath. Doesn't follow symlinks.
        @param lpath: list of watch descriptor
        @type lpath: list of int
        @return: list of watch descriptor
        @rtype: list of int
        """
        for d in lpath:
            root = self.get_path(d)
            if root is not None:
                # always keep root
                yield d
            else:
                # if invalid
                continue
            # nothing else to expect
            if not os.path.isdir(root):
                continue
            # normalization
            root = os.path.normpath(root)
            # recursion
            lend = len(root)
            for iwd in self._wmd.items():
                cur = iwd[1].path
                pref = os.path.commonprefix([root, cur])
                if root == os.sep or (len(pref) == lend and \
                                      len(cur) > lend and \
                                      cur[lend] == os.sep):
                    yield iwd[1].wd

    def update_watch(self, wd, mask=None, proc_fun=None, rec=False,
                     auto_add=False, quiet=True):
        """
        Update existing watch descriptors |wd|. The |mask| value, the
        processing object |proc_fun|, the recursive param |rec| and the
        |auto_add| and |quiet| flags can all be updated.
        @param wd: Watch Descriptor to update. Also accepts a list of
                   watch descriptors.
        @type wd: int or list of int
        @param mask: Optional new bitmask of events.
        @type mask: int
        @param proc_fun: Optional new processing function.
        @type proc_fun: function or ProcessEvent instance or instance of
                        one of its subclasses or callable object.
        @param rec: Optionally adds watches recursively on all
                    subdirectories contained into |wd| directory.
        @type rec: bool
        @param auto_add: Automatically adds watches on newly created
                         directories in the watch's path corresponding to |wd|.
                         If |auto_add| is True, IN_CREATE is ored with |mask|
                         when the watch is updated.
        @type auto_add: bool
        @param quiet: If False raises a WatchManagerError exception on
                      error. See example not_quiet.py
        @type quiet: bool
        @return: dict of watch descriptors associated to booleans values.
                 True if the corresponding wd has been successfully
                 updated, False otherwise.
        @rtype: dict of {int: bool}
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)
        ret_ = {}  # return {wd: bool, ...}
        for awd in lwd:
            apath = self.get_path(awd)
            if not apath or awd < 0:
                err = 'update_watch: invalid WD=%d' % awd
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)
            if mask:
                wd_ = self._inotify_wrapper.inotify_add_watch(self._fd, apath,
                                                              mask)
                if wd_ < 0:
                    ret_[awd] = False
                    err = ('update_watch: cannot update %s WD=%d, %s' % \
                           (apath, wd_, self._inotify_wrapper.str_errno()))
                    if quiet:
                        log.error(err)
                        continue
                    raise WatchManagerError(err, ret_)
                # Updating an existing path must return the same wd.
                assert(awd == wd_)
            if proc_fun or auto_add:
                watch_ = self._wmd[awd]
            if proc_fun:
                watch_.proc_fun = proc_fun
            if auto_add:
                watch_.auto_add = auto_add
            ret_[awd] = True
            log.debug('Updated watch - %s', self._wmd[awd])
        return ret_

    def __format_param(self, param):
        """
        @param param: Parameter.
        @type param: string or int
        @return: wrap param.
        @rtype: list of type(param)
        """
        if isinstance(param, list):
            for p_ in param:
                yield p_
        else:
            yield param

    def get_wd(self, path):
        """
        Returns the watch descriptor associated to path. This method
        presents a prohibitive cost, always prefer to keep the WD
        returned by add_watch(). If the path is unknown it returns None.
        @param path: Path.
        @type path: str
        @return: WD or None.
        @rtype: int or None
        """
        path = self.__format_path(path)
        for iwd in self._wmd.items():
            if iwd[1].path == path:
                return iwd[0]

    def get_path(self, wd):
        """
        Returns the path associated to WD, if WD is unknown it returns None.
        @param wd: Watch descriptor.
        @type wd: int
        @return: Path or None.
        @rtype: string or None
        """
        watch_ = self._wmd.get(wd)
        if watch_ is not None:
            return watch_.path

    def __walk_rec(self, top, rec):
        """
        Yields each subdirectories of top, doesn't follow symlinks.
        If rec is false, only yield top.
        @param top: root directory.
        @type top: string
        @param rec: recursive flag.
        @type rec: bool
        @return: path of one subdirectory.
        @rtype: string
        """
        if not rec or os.path.islink(top) or not os.path.isdir(top):
            yield top
        else:
            for root, dirs, files in os.walk(top):
                yield root

    def rm_watch(self, wd, rec=False, quiet=True):
        """
        Removes watch(s).
        @param wd: Watch Descriptor of the file or directory to unwatch.
                   Also accepts a list of WDs.
        @type wd: int or list of int.
        @param rec: Recursively removes watches on every already watched
                    subdirectories and subfiles.
        @type rec: bool
        @param quiet: If False raises a WatchManagerError exception on
                      error. See example not_quiet.py
        @type quiet: bool
        @return: dict of watch descriptors associated to booleans values.
                 True if the corresponding wd has been successfully
                 removed, False otherwise.
        @rtype: dict of {int: bool}
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)
        ret_ = {}  # return {wd: bool, ...}
        for awd in lwd:
            # remove watch
            wd_ = self._inotify_wrapper.inotify_rm_watch(self._fd, awd)
            if wd_ < 0:
                ret_[awd] = False
                err = ('rm_watch: cannot remove WD=%d, %s' % \
                       (awd, self._inotify_wrapper.str_errno()))
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)
            # Resolve the path *before* dropping the entry from self._wmd:
            # afterwards get_path() returns None and the debug log below
            # would always show an empty path.
            apath = self.get_path(awd)
            # Remove watch from our dictionary
            if awd in self._wmd:
                del self._wmd[awd]
            ret_[awd] = True
            log.debug('Watch WD=%d (%s) removed', awd, apath)
        return ret_

    def watch_transient_file(self, filename, mask, proc_class):
        """
        Watch a transient file, which will be created and deleted frequently
        over time (e.g. pid file).
        @attention: Currently under the call to this function it is not
        possible to correctly watch the events triggered into the same
        base directory than the directory where is located this watched
        transient file. For instance it would be wrong to make these
        two successive calls: wm.watch_transient_file('/var/run/foo.pid', ...)
        and wm.add_watch('/var/run/', ...)
        @param filename: Filename.
        @type filename: string
        @param mask: Bitmask of events, should contain IN_CREATE and IN_DELETE.
        @type mask: int
        @param proc_class: ProcessEvent (or of one of its subclass), beware of
                           accepting a ProcessEvent's instance as argument into
                           __init__, see transient_file.py example for more
                           details.
        @type proc_class: ProcessEvent's instance or of one of its subclasses.
        @return: Same as add_watch().
        @rtype: Same as add_watch().
        """
        dirname = os.path.dirname(filename)
        if dirname == '':
            return {}  # Maintains coherence with add_watch()
        basename = os.path.basename(filename)
        # Assuming we are watching at least for IN_CREATE and IN_DELETE
        mask |= IN_CREATE | IN_DELETE

        def cmp_name(event):
            if getattr(event, 'name') is None:
                return False
            return basename == event.name
        return self.add_watch(dirname, mask,
                              proc_fun=proc_class(ChainIfTrue(func=cmp_name)),
                              rec=False,
                              auto_add=False, do_glob=False,
                              exclude_filter=lambda path: False)
class RawOutputFormat:
    """
    Render string fragments, optionally decorated with attributes taken
    from a format mapping (attribute name -> escape sequence).
    """
    def __init__(self, format=None):
        self.format = format or {}

    def simple(self, s, attribute):
        """Return s wrapped by the attribute's prefix and the 'normal'
        suffix; non-str values are converted first."""
        if not isinstance(s, str):
            s = str(s)
        prefix = self.format.get(attribute, '')
        suffix = self.format.get('normal', '')
        return prefix + s + suffix

    def punctuation(self, s):
        """Punctuation color."""
        return self.simple(s, 'normal')

    def field_value(self, s):
        """Field value color."""
        return self.simple(s, 'purple')

    def field_name(self, s):
        """Field name color."""
        return self.simple(s, 'blue')

    def class_name(self, s):
        """Class name color."""
        return self.format.get('red', '') + self.simple(s, 'bold')
output_format = RawOutputFormat()
class ColoredOutputFormat(RawOutputFormat):
    """
    RawOutputFormat preconfigured with ANSI SGR escape sequences for
    colored terminal output.
    """
    def __init__(self):
        # Attribute name -> SGR parameter number; rendered below as the
        # usual '\033[<n>m' escape sequences.
        sgr_codes = {'normal': 0,
                     'black': 30,
                     'red': 31,
                     'green': 32,
                     'yellow': 33,
                     'blue': 34,
                     'purple': 35,
                     'cyan': 36,
                     'bold': 1,
                     'uline': 4,
                     'blink': 5,
                     'invert': 7}
        f = {name: '\033[%dm' % num for name, num in sgr_codes.items()}
        RawOutputFormat.__init__(self, f)
def compatibility_mode():
    """
    Use this function to turn on the compatibility mode. The compatibility
    mode is used to improve compatibility with Pyinotify 0.7.1 (or older)
    programs. The compatibility mode provides additional variables 'is_dir',
    'event_name', 'EventsCodes.IN_*' and 'EventsCodes.ALL_EVENTS' as
    Pyinotify 0.7.1 provided. Do not call this function from new programs!!
    Especially if there are developed for Pyinotify >= 0.8.x.
    """
    setattr(EventsCodes, 'ALL_EVENTS', ALL_EVENTS)
    # Re-expose every IN_* module-level flag as a class attribute of
    # EventsCodes, as the 0.7.x API did.
    for evname in globals():
        if evname.startswith('IN_'):
            setattr(EventsCodes, evname, globals()[evname])
    # Event.__init__ checks this global to decide whether to also set the
    # legacy 'event_name' attribute.
    global COMPATIBILITY_MODE
    COMPATIBILITY_MODE = True
def command_line():
    """
    Command line interface.

    By default the watched path is '/tmp' and all types of events are
    monitored. Events monitoring serves forever, type c^c to stop it.
    """
    from optparse import OptionParser
    usage = "usage: %prog [options] [path1] [path2] [pathn]"
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", help="Verbose mode")
    parser.add_option("-r", "--recursive", action="store_true",
                      dest="recursive",
                      help="Add watches recursively on paths")
    parser.add_option("-a", "--auto_add", action="store_true",
                      dest="auto_add",
                      help="Automatically add watches on new directories")
    parser.add_option("-e", "--events-list", metavar="EVENT[,...]",
                      dest="events_list",
                      help=("A comma-separated list of events to watch for - "
                           "see the documentation for valid options (defaults"
                           " to everything)"))
    parser.add_option("-s", "--stats", action="store_true",
                      dest="stats",
                      help="Display dummy statistics")
    parser.add_option("-V", "--version", action="store_true",
                      dest="version", help="Pyinotify version")
    parser.add_option("-f", "--raw-format", action="store_true",
                      dest="raw_format",
                      help="Disable enhanced output format.")
    parser.add_option("-c", "--command", action="store",
                      dest="command",
                      help="Shell command to run upon event")
    (options, args) = parser.parse_args()
    if options.verbose:
        # 10 == logging.DEBUG
        log.setLevel(10)
    if options.version:
        # NOTE(review): execution continues (monitoring still starts) after
        # printing the version -- confirm this is intended.
        print(__version__)
    if not options.raw_format:
        global output_format
        output_format = ColoredOutputFormat()
    if len(args) < 1:
        path = '/tmp'  # default watched path
    else:
        path = args
    # watch manager instance
    wm = WatchManager()
    # notifier instance and init
    if options.stats:
        notifier = Notifier(wm, default_proc_fun=Stats(), read_freq=5)
    else:
        notifier = Notifier(wm, default_proc_fun=PrintAllEvents())
    # What mask to apply
    mask = 0
    if options.events_list:
        events_list = options.events_list.split(',')
        for ev in events_list:
            evcode = EventsCodes.ALL_FLAGS.get(ev, 0)
            if evcode:
                mask |= evcode
            else:
                parser.error("The event '%s' specified with option -e"
                             " is not valid" % ev)
    else:
        mask = ALL_EVENTS
    # stats
    cb_fun = None
    if options.stats:
        def cb(s):
            sys.stdout.write(repr(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.write(str(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.flush()
        cb_fun = cb
    # External command
    if options.command:
        # Fire-and-forget: the command's exit status is never collected.
        def cb(s):
            subprocess.Popen(options.command, shell=True)
        cb_fun = cb
    log.debug('Start monitoring %s, (press c^c to halt pyinotify)' % path)
    wm.add_watch(path, mask, rec=options.recursive, auto_add=options.auto_add)
    # Loop forever (until sigint signal get caught)
    notifier.loop(callback=cb_fun)
if __name__ == '__main__':
command_line()
| 37.603701 | 83 | 0.58276 |
class PyinotifyError(Exception):
    """Base class for all pyinotify exceptions."""
    pass
class UnsupportedPythonVersionError(PyinotifyError):
    """Raised when the running interpreter is older than Python 3.0."""
    def __init__(self, version):
        """
        @param version: Version string of the unsupported interpreter.
        @type version: str
        """
        PyinotifyError.__init__(self,
                                ('Python %s is unsupported, requires '
                                 'at least Python 3.0') % version)
import sys
if sys.version_info < (3, 0):
raise UnsupportedPythonVersionError(sys.version)
import threading
import os
import select
import struct
import fcntl
import errno
import termios
import array
import logging
import atexit
from collections import deque
from datetime import datetime, timedelta
import time
import re
import asyncore
import glob
import locale
import subprocess
try:
from functools import reduce
except ImportError:
pass
try:
import ctypes
import ctypes.util
except ImportError:
ctypes = None
try:
import inotify_syscalls
except ImportError:
inotify_syscalls = None
__author__ = "seb@dbzteam.org (Sebastien Martini)"
__version__ = "0.9.4"
COMPATIBILITY_MODE = False
class InotifyBindingNotFoundError(PyinotifyError):
    """Raised when no usable inotify binding (ctypes or C extension) is
    available on this system."""
    def __init__(self):
        err = "Couldn't find any inotify binding"
        PyinotifyError.__init__(self, err)
class INotifyWrapper:
    """
    Abstract access to the inotify syscalls. Concrete subclasses implement
    the _-prefixed methods (ctypes-based or C-extension-based).
    """
    @staticmethod
    def create():
        """
        Return the first usable binding, or None (implicitly) when neither
        ctypes nor the compiled C extension is available.
        """
        # First, try to use ctypes.
        if ctypes:
            inotify = _CtypesLibcINotifyWrapper()
            if inotify.init():
                return inotify
        # Second, see if C extension is compiled.
        if inotify_syscalls:
            inotify = _INotifySyscallsWrapper()
            if inotify.init():
                return inotify

    def get_errno(self):
        """Return the last errno value recorded by the binding, or None."""
        return self._get_errno()

    def str_errno(self):
        """Return a human-readable description of the last errno."""
        code = self.get_errno()
        if code is None:
            return 'Errno: no errno support'
        return 'Errno=%s (%s)' % (os.strerror(code), errno.errorcode[code])

    def inotify_init(self):
        """Call inotify_init(2); return the new fd or a negative value."""
        return self._inotify_init()

    def inotify_add_watch(self, fd, pathname, mask):
        # Unicode strings must be encoded to string prior to calling this
        # method.
        assert isinstance(pathname, str)
        return self._inotify_add_watch(fd, pathname, mask)

    def inotify_rm_watch(self, fd, wd):
        """Call inotify_rm_watch(2); return 0 or a negative value."""
        return self._inotify_rm_watch(fd, wd)
class _INotifySyscallsWrapper(INotifyWrapper):
    """inotify binding backed by the compiled inotify_syscalls C extension."""
    def __init__(self):
        # Stores the last errno value.
        self._last_errno = None

    def init(self):
        """Return True; the extension module is assumed importable here."""
        assert inotify_syscalls
        return True

    def _get_errno(self):
        return self._last_errno

    def _inotify_init(self):
        # On failure record errno and return -1, mirroring the raw syscall
        # convention expected by callers.
        try:
            fd = inotify_syscalls.inotify_init()
        except IOError as err:
            self._last_errno = err.errno
            return -1
        return fd

    def _inotify_add_watch(self, fd, pathname, mask):
        try:
            wd = inotify_syscalls.inotify_add_watch(fd, pathname, mask)
        except IOError as err:
            self._last_errno = err.errno
            return -1
        return wd

    def _inotify_rm_watch(self, fd, wd):
        try:
            ret = inotify_syscalls.inotify_rm_watch(fd, wd)
        except IOError as err:
            self._last_errno = err.errno
            return -1
        return ret
class _CtypesLibcINotifyWrapper(INotifyWrapper):
    """inotify binding implemented with ctypes calls into the C library."""
    def __init__(self):
        self._libc = None
        self._get_errno_func = None

    def init(self):
        """
        Load libc, verify it exposes the inotify symbols and declare their
        prototypes. Return True on success, False otherwise.
        """
        assert ctypes
        libc_name = None
        try:
            libc_name = ctypes.util.find_library('c')
        except (OSError, IOError):
            pass # Will attempt to load it with None anyway.
        self._libc = ctypes.CDLL(libc_name, use_errno=True)
        self._get_errno_func = ctypes.get_errno
        # Eventually check that libc has needed inotify bindings.
        if (not hasattr(self._libc, 'inotify_init') or
            not hasattr(self._libc, 'inotify_add_watch') or
            not hasattr(self._libc, 'inotify_rm_watch')):
            return False
        # Declare argument/return types so ctypes marshals values correctly.
        self._libc.inotify_init.argtypes = []
        self._libc.inotify_init.restype = ctypes.c_int
        self._libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p,
                                                 ctypes.c_uint32]
        self._libc.inotify_add_watch.restype = ctypes.c_int
        self._libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
        self._libc.inotify_rm_watch.restype = ctypes.c_int
        return True

    def _get_errno(self):
        assert self._get_errno_func
        return self._get_errno_func()

    def _inotify_init(self):
        assert self._libc is not None
        return self._libc.inotify_init()

    def _inotify_add_watch(self, fd, pathname, mask):
        assert self._libc is not None
        # Encodes path to a bytes string. This conversion seems required because
        # ctypes.create_string_buffer seems to manipulate bytes internally.
        # Moreover it seems that inotify_add_watch does not work very well when
        # it receives an ctypes.create_unicode_buffer instance as argument.
        pathname = pathname.encode(sys.getfilesystemencoding())
        pathname = ctypes.create_string_buffer(pathname)
        return self._libc.inotify_add_watch(fd, pathname, mask)

    def _inotify_rm_watch(self, fd, wd):
        assert self._libc is not None
        return self._libc.inotify_rm_watch(fd, wd)

    def _sysctl(self, *args):
        # Thin passthrough to libc's sysctl(); used by SysCtlINotify.
        assert self._libc is not None
        return self._libc.sysctl(*args)
# Logging
def logger_init():
    """Configure and return the module-wide 'pyinotify' logger."""
    logger = logging.getLogger("pyinotify")
    handler = logging.StreamHandler()
    fmt = logging.Formatter("[%(asctime)s %(name)s %(levelname)s] %(message)s")
    handler.setFormatter(fmt)
    logger.addHandler(handler)
    logger.setLevel(20)  # 20 == logging.INFO
    return logger
log = logger_init()
# inotify's variables
class SysCtlINotify:
    """
    Read/write access to the kernel's inotify sysctl variables
    (fs.inotify.max_user_instances / max_user_watches / max_queued_events)
    through libc's sysctl(). Writing usually requires root privileges.
    """
    # Attribute name -> index of the variable in the sysctl MIB.
    inotify_attrs = {'max_user_instances': 1,
                     'max_user_watches': 2,
                     'max_queued_events': 3}

    def __init__(self, attrname, inotify_wrapper):
        assert ctypes
        self._attrname = attrname
        self._inotify_wrapper = inotify_wrapper
        sino = ctypes.c_int * 3
        # MIB path {5, 20, attr}: presumably CTL_FS=5, FS_INOTIFY=20 from
        # linux/sysctl.h -- TODO confirm.
        self._attr = sino(5, 20, SysCtlINotify.inotify_attrs[attrname])

    @staticmethod
    def create(attrname):
        """
        Build a SysCtlINotify for attrname, or return None when ctypes or
        the libc binding is unavailable.
        """
        if ctypes is None:
            return None
        inotify_wrapper = _CtypesLibcINotifyWrapper()
        if not inotify_wrapper.init():
            return None
        return SysCtlINotify(attrname, inotify_wrapper)

    def get_val(self):
        """Read and return the current integer value of the variable."""
        oldv = ctypes.c_int(0)
        size = ctypes.c_int(ctypes.sizeof(oldv))
        self._inotify_wrapper._sysctl(self._attr, 3,
                                      ctypes.c_voidp(ctypes.addressof(oldv)),
                                      ctypes.addressof(size),
                                      None, 0)
        return oldv.value

    def set_val(self, nval):
        """Set the variable to nval (old value is read back into oldv)."""
        oldv = ctypes.c_int(0)
        sizeo = ctypes.c_int(ctypes.sizeof(oldv))
        newv = ctypes.c_int(nval)
        sizen = ctypes.c_int(ctypes.sizeof(newv))
        self._inotify_wrapper._sysctl(self._attr, 3,
                                      ctypes.c_voidp(ctypes.addressof(oldv)),
                                      ctypes.addressof(sizeo),
                                      ctypes.c_voidp(ctypes.addressof(newv)),
                                      ctypes.addressof(sizen))

    # Usage: myvar = max_queued_events.value ; max_queued_events.value = 42
    value = property(get_val, set_val)

    def __repr__(self):
        return '<%s=%d>' % (self._attrname, self.get_val())
#
# FIXME: currently these variables are only accessible when ctypes is used,
# otherwise there are set to None.
#
# read: myvar = max_queued_events.value
# update: max_queued_events.value = 42
#
for attrname in ('max_queued_events', 'max_user_instances', 'max_user_watches'):
globals()[attrname] = SysCtlINotify.create(attrname)
class EventsCodes:
    """
    Container for the inotify event flags. The nested dictionaries below are
    flattened into class attributes and into the ALL_FLAGS / ALL_VALUES
    lookup tables by module-level code right after this class definition.
    """
    # The idea here is 'configuration-as-code' - this way, we get our nice class
    # constants, but we also get nice human-friendly text mappings to do lookups
    # against as well, for free:
    FLAG_COLLECTIONS = {'OP_FLAGS': {
        'IN_ACCESS'        : 0x00000001,  # File was accessed
        'IN_MODIFY'        : 0x00000002,  # File was modified
        'IN_ATTRIB'        : 0x00000004,  # Metadata changed
        'IN_CLOSE_WRITE'   : 0x00000008,  # Writable file was closed
        'IN_CLOSE_NOWRITE' : 0x00000010,  # Unwritable file closed
        'IN_OPEN'          : 0x00000020,  # File was opened
        'IN_MOVED_FROM'    : 0x00000040,  # File was moved from X
        'IN_MOVED_TO'      : 0x00000080,  # File was moved to Y
        'IN_CREATE'        : 0x00000100,  # Subfile was created
        'IN_DELETE'        : 0x00000200,  # Subfile was deleted
        'IN_DELETE_SELF'   : 0x00000400,  # Self (watched item itself)
                                          # was deleted
        'IN_MOVE_SELF'     : 0x00000800,  # Self (watched item itself) was moved
        },
                        'EVENT_FLAGS': {
        'IN_UNMOUNT'       : 0x00002000,  # Backing fs was unmounted
        'IN_Q_OVERFLOW'    : 0x00004000,  # Event queued overflowed
        'IN_IGNORED'       : 0x00008000,  # File was ignored
        },
                        'SPECIAL_FLAGS': {
        'IN_ONLYDIR'       : 0x01000000,  # only watch the path if it is a
                                          # directory
        'IN_DONT_FOLLOW'   : 0x02000000,  # don't follow a symlink
        'IN_EXCL_UNLINK'   : 0x04000000,
        'IN_MASK_ADD'      : 0x20000000,
        'IN_ISDIR'         : 0x40000000,
        'IN_ONESHOT'       : 0x80000000,
        },
                        }

    def maskname(mask):
        """
        Return the event name of mask, stripping the IN_ISDIR bit and
        appending '|IN_ISDIR' to the name when that bit was set.
        """
        ms = mask
        name = '%s'
        if mask & IN_ISDIR:
            ms = mask - IN_ISDIR
            name = '%s|IN_ISDIR'
        return name % EventsCodes.ALL_VALUES[ms]

    maskname = staticmethod(maskname)
EventsCodes.ALL_FLAGS = {}
EventsCodes.ALL_VALUES = {}
for flagc, valc in EventsCodes.FLAG_COLLECTIONS.items():
# Make the collections' members directly accessible through the
setattr(EventsCodes, flagc, valc)
EventsCodes.ALL_FLAGS.update(valc)
for name, val in valc.items():
globals()[name] = val
EventsCodes.ALL_VALUES[val] = name
ALL_EVENTS = reduce(lambda x, y: x | y, EventsCodes.OP_FLAGS.values())
EventsCodes.ALL_FLAGS['ALL_EVENTS'] = ALL_EVENTS
EventsCodes.ALL_VALUES[ALL_EVENTS] = 'ALL_EVENTS'
class _Event:
def __init__(self, dict_):
for tpl in dict_.items():
setattr(self, *tpl)
def __repr__(self):
s = ''
for attr, value in sorted(self.__dict__.items(), key=lambda x: x[0]):
if attr.startswith('_'):
continue
if attr == 'mask':
value = hex(getattr(self, attr))
elif isinstance(value, str) and not value:
value = "''"
s += ' %s%s%s' % (output_format.field_name(attr),
output_format.punctuation('='),
output_format.field_value(value))
s = '%s%s%s %s' % (output_format.punctuation('<'),
output_format.class_name(self.__class__.__name__),
s,
output_format.punctuation('>'))
return s
def __str__(self):
return repr(self)
class _RawEvent(_Event):
    """
    Raw event, it contains only the information provided by the system.
    """
    def __init__(self, wd, mask, cookie, name):
        """
        @param wd: Watch Descriptor.
        @type wd: int
        @param mask: Bitmask of events.
        @type mask: int
        @param cookie: Cookie value used to pair IN_MOVED_FROM/IN_MOVED_TO.
        @type cookie: int
        @param name: Basename of the file/directory the event was raised
                     against (relative to the watched directory).
        @type name: string
        """
        # Cache for the string representation, built lazily by __str__.
        self._str = None
        d = {'wd': wd,
             'mask': mask,
             'cookie': cookie,
             'name': name.rstrip('\0')}  # strip the kernel's NUL padding
        _Event.__init__(self, d)
        log.debug(str(self))
    def __str__(self):
        # Memoize: the raw event is immutable, so the representation is
        # computed at most once.
        if self._str is None:
            self._str = _Event.__str__(self)
        return self._str
class Event(_Event):
    """
    This class contains all the useful information about the observed
    event: the raw attributes plus inferred ones (maskname, pathname).
    """
    def __init__(self, raw):
        """
        @param raw: Dictionary of attributes for this event (typically
                    built by _SysProcessEvent.process_default).
        @type raw: dict
        """
        _Event.__init__(self, raw)
        self.maskname = EventsCodes.maskname(self.mask)
        if COMPATIBILITY_MODE:
            # Pyinotify <= 0.7.1 exposed the mask name as 'event_name'.
            self.event_name = self.maskname
        try:
            if self.name:
                self.pathname = os.path.abspath(os.path.join(self.path,
                                                             self.name))
            else:
                self.pathname = os.path.abspath(self.path)
        except AttributeError as err:
            # Some events legitimately carry no 'path' attribute
            # (e.g. IN_Q_OVERFLOW); in that case pathname is simply not set.
            log.debug(err)
class ProcessEventError(PyinotifyError):
    """
    ProcessEventError Exception. Raised on ProcessEvent error.
    """
    def __init__(self, err):
        """
        @param err: Exception error description.
        @type err: string
        """
        super().__init__(err)
class _ProcessEvent:
    """
    Abstract processing event class.
    """
    def __call__(self, event):
        """
        To behave like a functor the object must be callable.
        This method is a dispatch method. Its lookup order:

          1. process_MASKNAME  (e.g. process_IN_DELETE_SELF)
          2. process_FAMILY    (e.g. process_IN_DELETE)
          3. process_default   (fallback)

        @param event: Event to be processed.
        @type event: Event object
        @raise ProcessEventError: raised when the event's mask does not
                                  correspond to any known event name.
        """
        # IN_ISDIR is a modifier piggybacked on another flag, never an
        # event of its own: strip it before the name lookup.
        stripped_mask = event.mask - (event.mask & IN_ISDIR)
        maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
        if maskname is None:
            raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)
        # 1- look for an exact process_MASKNAME handler.
        meth = getattr(self, 'process_' + maskname, None)
        if meth is not None:
            return meth(event)
        # 2- look for a family handler, e.g. process_IN_CLOSE covers both
        #    IN_CLOSE_WRITE and IN_CLOSE_NOWRITE.
        meth = getattr(self, 'process_IN_' + maskname.split('_')[1], None)
        if meth is not None:
            return meth(event)
        # 3- fall back on the default handler.
        return self.process_default(event)
    def __repr__(self):
        return '<%s>' % self.__class__.__name__
class _SysProcessEvent(_ProcessEvent):
    """
    There are three kinds of processing according to each event:

      1. special handling (deletion from internal container, ...),
      2. default treatment, applied to the majority of events,
      3. IN_ISDIR is never sent alone: it is piggybacked on a standard
         event and only used as a modifier, never processed on its own.
    """
    def __init__(self, wm, notifier):
        """
        @param wm: Watch Manager.
        @type wm: WatchManager instance
        @param notifier: Notifier.
        @type notifier: Notifier instance
        """
        self._watch_manager = wm  # watch manager
        self._notifier = notifier  # notifier
        self._mv_cookie = {}  # {cookie(int): (src_path(str), date), ...}
        self._mv = {}  # {src_path(str): (dst_path(str), date), ...}
    def cleanup(self):
        """
        Cleanup (delete) old (> 1mn) records contained in self._mv_cookie
        and self._mv.
        """
        date_cur_ = datetime.now()
        for seq in (self._mv_cookie, self._mv):
            # Iterate over a copy of the keys: entries are deleted in place.
            for k in list(seq.keys()):
                if (date_cur_ - seq[k][1]) > timedelta(minutes=1):
                    log.debug('Cleanup: deleting entry %s', seq[k][0])
                    del seq[k]
    def process_IN_CREATE(self, raw_event):
        """
        If the event affects a directory and the auto_add flag of the
        targeted watch is set to True, a new watch is added on this new
        directory, with the same attribute values as those of this watch.
        """
        if raw_event.mask & IN_ISDIR:
            watch_ = self._watch_manager.get_watch(raw_event.wd)
            created_dir = os.path.join(watch_.path, raw_event.name)
            if watch_.auto_add and not watch_.exclude_filter(created_dir):
                addw = self._watch_manager.add_watch
                # rec=False: the new watch itself fires further IN_CREATE
                # events for nested directories, so no recursion needed here.
                addw_ret = addw(created_dir, watch_.mask,
                                proc_fun=watch_.proc_fun,
                                rec=False, auto_add=watch_.auto_add,
                                exclude_filter=watch_.exclude_filter)
                created_dir_wd = addw_ret.get(created_dir)
                if (created_dir_wd is not None) and (created_dir_wd > 0):
                    # Entries may already exist in the new directory (created
                    # between the mkdir and the watch installation): emit
                    # synthetic IN_CREATE events for them so none is missed.
                    for name in os.listdir(created_dir):
                        inner = os.path.join(created_dir, name)
                        if self._watch_manager.get_wd(inner) is not None:
                            continue
                        if os.path.isfile(inner):
                            flags = IN_CREATE
                        elif os.path.isdir(inner):
                            flags = IN_CREATE | IN_ISDIR
                        else:
                            # Ignore special files (fifo, socket, ...).
                            continue
                        rawevent = _RawEvent(created_dir_wd, flags, 0, name)
                        self._notifier.append_event(rawevent)
        return self.process_default(raw_event)
    def process_IN_MOVED_FROM(self, raw_event):
        """
        Map the cookie with the source path (+ date for cleaning).
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        path_ = watch_.path
        src_path = os.path.normpath(os.path.join(path_, raw_event.name))
        self._mv_cookie[raw_event.cookie] = (src_path, datetime.now())
        return self.process_default(raw_event, {'cookie': raw_event.cookie})
    def process_IN_MOVED_TO(self, raw_event):
        """
        Map the source path with the destination path (+ date for
        cleaning).
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        path_ = watch_.path
        dst_path = os.path.normpath(os.path.join(path_, raw_event.name))
        mv_ = self._mv_cookie.get(raw_event.cookie)
        to_append = {'cookie': raw_event.cookie}
        if mv_ is not None:
            self._mv[mv_[0]] = (dst_path, datetime.now())
            # The cookie was recorded by IN_MOVED_FROM, meaning
            # that its associated (they share a common cookie) IN_MOVED_TO
            # event is queued itself. It is then possible in that scenario
            # to provide as additional information to the IN_MOVED_TO event
            # the original pathname of the moved file/directory.
            to_append['src_pathname'] = mv_[0]
        elif (raw_event.mask & IN_ISDIR and watch_.auto_add and
              not watch_.exclude_filter(dst_path)):
            # We got a diretory that's "moved in" from an unknown source and
            # auto_add is enabled: manually add watches recursively since the
            # move did not generate per-entry IN_CREATE events.
            self._watch_manager.add_watch(dst_path, watch_.mask,
                                          proc_fun=watch_.proc_fun,
                                          rec=True, auto_add=True,
                                          exclude_filter=watch_.exclude_filter)
        return self.process_default(raw_event, to_append)
    def process_IN_MOVE_SELF(self, raw_event):
        """
        STATUS: the following bug has been fixed in recent kernels (FIXME:
        which version?). Now it raises IN_DELETE_SELF instead.

        Keep the path of the watched item up to date when it is moved,
        relying on the IN_MOVED_FROM/IN_MOVED_TO bookkeeping in self._mv.
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        src_path = watch_.path
        mv_ = self._mv.get(src_path)
        if mv_:
            dest_path = mv_[0]
            watch_.path = dest_path
            # Rewrite the path of every child watch under the old location.
            src_path += os.path.sep
            src_path_len = len(src_path)
            for w in self._watch_manager.watches.values():
                if w.path.startswith(src_path):
                    w.path = os.path.join(dest_path, w.path[src_path_len:])
        else:
            log.error("The pathname '%s' of this watch %s has probably changed "
                      "and couldn't be updated, so it cannot be trusted "
                      "anymore. To fix this error move directories/files only "
                      "between watched parents directories, in this case e.g. "
                      "put a watch on '%s'.",
                      watch_.path, watch_,
                      os.path.normpath(os.path.join(watch_.path,
                                                    os.path.pardir)))
            # Mark the stale path explicitly so it is visibly untrusted.
            if not watch_.path.endswith('-unknown-path'):
                watch_.path += '-unknown-path'
        return self.process_default(raw_event)
    def process_IN_Q_OVERFLOW(self, raw_event):
        """
        Only signal the overflow: this event carries no wd, so no watch
        lookup nor default processing is possible.
        """
        return Event({'mask': raw_event.mask})
    def process_IN_IGNORED(self, raw_event):
        """
        The watch descriptor raised by this event is now ignored (deleted
        by the kernel): drop it from the internal dictionary too.
        """
        event_ = self.process_default(raw_event)
        self._watch_manager.del_watch(raw_event.wd)
        return event_
    def process_default(self, raw_event, to_append=None):
        """
        Commons handling for the followings events:

        IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_WRITE, IN_CLOSE_NOWRITE,
        IN_OPEN, IN_DELETE, IN_DELETE_SELF, IN_UNMOUNT.
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        if raw_event.mask & (IN_DELETE_SELF | IN_MOVE_SELF):
            # Unfornulately this information is not provided by the kernel
            # for *_SELF events, so fall back on what the watch recorded.
            dir_ = watch_.dir
        else:
            dir_ = bool(raw_event.mask & IN_ISDIR)
        dict_ = {'wd': raw_event.wd,
                 'mask': raw_event.mask,
                 'path': watch_.path,
                 'name': raw_event.name,
                 'dir': dir_}
        if COMPATIBILITY_MODE:
            dict_['is_dir'] = dir_
        if to_append is not None:
            dict_.update(to_append)
        return Event(dict_)
class ProcessEvent(_ProcessEvent):
    """
    Process events objects, can be specialized via subclassing, thus its
    behavior can be overridden:

    Note: you should not override __init__ in your subclass; instead define
    a my_init() method, which is called automatically from this class's
    constructor with the optional keyword parameters.

      1. Provide specialized individual methods, e.g. process_IN_DELETE for
         processing a precise type of event (IN_DELETE in this case).
      2. Or/and provide methods for processing events by 'family', e.g.
         process_IN_CLOSE will process both IN_CLOSE_WRITE and
         IN_CLOSE_NOWRITE (unless the specific methods are also defined).
      3. Or/and override process_default for catching and processing all
         the remaining types of events.
    """
    # Optional nested (chained) processing instance, called before self.
    pevent = None
    def __init__(self, pevent=None, **kargs):
        """
        Enable chaining of ProcessEvent instances.

        @param pevent: Optional callable object, will be called on event
                       processing (before self).
        @type pevent: callable
        @param kargs: Optional keyword arguments delegated to my_init().
        @type kargs: dict
        """
        self.pevent = pevent
        self.my_init(**kargs)
    def my_init(self, **kargs):
        """
        Override this method in your subclass to perform instance
        initialization; it receives all the keyword arguments passed to
        ProcessEvent.__init__(). Empty by default.
        """
        pass
    def __call__(self, event):
        stop_chaining = False
        if self.pevent is not None:
            # By default methods return None so we set as guideline
            # that methods asking for stop chaining must explicitely
            # return non None or non False values, otherwise the default
            # behavior will be to accept chain call to the corresponding
            # local method.
            stop_chaining = self.pevent(event)
        if not stop_chaining:
            return _ProcessEvent.__call__(self, event)
    def nested_pevent(self):
        """Return the nested (chained) processing instance, if any."""
        return self.pevent
    def process_IN_Q_OVERFLOW(self, event):
        """
        By default this method only reports a warning message; override it
        (e.g. raise max_queued_events or improve filtering) to handle the
        overflow differently.
        """
        log.warning('Event queue overflowed.')
    def process_default(self, event):
        """
        Default processing event method. Does nothing by default; redefine
        it in a subclass to change this behavior.

        @param event: Event to be processed (any type except IN_Q_OVERFLOW,
                      which has its own method above).
        @type event: Event instance
        """
        pass
class PrintAllEvents(ProcessEvent):
    """
    Dummy processing functor: prints a textual dump of every received
    event on the configured output stream.
    """
    def my_init(self, out=None):
        """
        @param out: Where events will be written, defaults to stdout.
        @type out: Object providing a valid file object interface.
        """
        self._out = sys.stdout if out is None else out
    def process_default(self, event):
        # One event per line, flushed immediately so output stays live.
        self._out.write(str(event))
        self._out.write('\n')
        self._out.flush()
class ChainIfTrue(ProcessEvent):
    """
    Makes conditional chaining depending on the result of the nested
    processing instance.
    """
    def my_init(self, func):
        # Predicate evaluated against each event.
        self._func = func
    def process_default(self, event):
        # Returning True stops the chain, so chaining continues only when
        # the predicate holds.
        return not self._func(event)
class Stats(ProcessEvent):
    """
    Compute and display trivial statistics about the processed events:
    a counter per event name, protected by a lock so the instance can be
    shared across threads.
    """
    def my_init(self):
        self._start_time = time.time()  # instantiation time, for ElapsedTime
        self._stats = {}  # {event name (str): occurrence count (int)}
        self._stats_lock = threading.Lock()
    def process_default(self, event):
        """
        Processes |event| and updates the per-event-name counters.
        """
        self._stats_lock.acquire()
        try:
            # A combined maskname such as 'IN_CREATE|IN_ISDIR' counts once
            # for each component.
            events = event.maskname.split('|')
            for event_name in events:
                count = self._stats.get(event_name, 0)
                self._stats[event_name] = count + 1
        finally:
            self._stats_lock.release()
    def _stats_copy(self):
        """Return a consistent snapshot (shallow copy) of the counters."""
        self._stats_lock.acquire()
        try:
            return self._stats.copy()
        finally:
            self._stats_lock.release()
    def __repr__(self):
        """One-line summary including the elapsed time since creation."""
        stats = self._stats_copy()
        elapsed = int(time.time() - self._start_time)
        elapsed_str = ''
        if elapsed < 60:
            elapsed_str = str(elapsed) + 'sec'
        elif 60 <= elapsed < 3600:
            elapsed_str = '%dmn%dsec' % (elapsed / 60, elapsed % 60)
        elif 3600 <= elapsed < 86400:
            elapsed_str = '%dh%dmn' % (elapsed / 3600, (elapsed % 3600) / 60)
        elif elapsed >= 86400:
            elapsed_str = '%dd%dh' % (elapsed / 86400, (elapsed % 86400) / 3600)
        stats['ElapsedTime'] = elapsed_str
        l = []
        for ev, value in sorted(stats.items(), key=lambda x: x[0]):
            l.append(' %s=%s' % (output_format.field_name(ev),
                                 output_format.field_value(value)))
        s = '<%s%s >' % (output_format.class_name(self.__class__.__name__),
                         ''.join(l))
        return s
    def dump(self, filename):
        """
        Dumps statistics to |filename|.

        @param filename: pathname.
        @type filename: string
        """
        # O_EXCL + O_NOFOLLOW: refuse to overwrite an existing file or to
        # follow a symlink (avoids clobbering through a planted link).
        flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
        fd = os.open(filename, flags, 0o0600)
        os.write(fd, bytes(self.__str__(), locale.getpreferredencoding()))
        os.close(fd)
    def __str__(self, scale=45):
        """ASCII bar chart of the counters, bars scaled to |scale| chars."""
        stats = self._stats_copy()
        if not stats:
            return ''
        # Scale every bar relative to the most frequent event.
        m = max(stats.values())
        unity = scale / m
        fmt = '%%-26s%%-%ds%%s' % (len(output_format.field_value('@' * scale))
                                   + 1)
        def func(x):
            return fmt % (output_format.field_name(x[0]),
                          output_format.field_value('@' * int(x[1] * unity)),
                          output_format.simple('%d' % x[1], 'yellow'))
        s = '\n'.join(map(func, sorted(stats.items(), key=lambda x: x[0])))
        return s
class NotifierError(PyinotifyError):
    """
    Notifier Exception. Raised on Notifier error.
    """
    def __init__(self, err):
        """
        @param err: Exception string's description.
        @type err: string
        """
        super().__init__(err)
class Notifier:
    """
    Read notifications, process events.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None):
        """
        Initialization. read_freq, threshold and timeout parameters are
        used when looping.

        @param watch_manager: Watch Manager.
        @type watch_manager: WatchManager instance
        @param default_proc_fun: Default processing method. If None, a new
                                 instance of PrintAllEvents is assigned.
        @type default_proc_fun: instance of ProcessEvent
        @param read_freq: if read_freq == 0, events are read asap; if
                          read_freq > 0 the loop sleeps so that reads occur
                          at most once every read_freq seconds.
        @type read_freq: int
        @param threshold: the file descriptor is read only once the
                          accumulated size to read reaches this many bytes.
                          Usually combined with a suitable read_freq.
        @type threshold: int
        @param timeout: poll timeout in milliseconds (None blocks).
        @type timeout: int
        """
        # Watch Manager instance
        self._watch_manager = watch_manager
        # File descriptor
        self._fd = self._watch_manager.get_fd()
        # Poll object and registration
        self._pollobj = select.poll()
        self._pollobj.register(self._fd, select.POLLIN)
        # This pipe is correctely initialized and used by ThreadedNotifier
        self._pipe = (-1, -1)
        # Event queue
        self._eventq = deque()
        # System processing functor, common to all events
        self._sys_proc_fun = _SysProcessEvent(self._watch_manager, self)
        # Default processing method
        self._default_proc_fun = default_proc_fun
        if default_proc_fun is None:
            self._default_proc_fun = PrintAllEvents()
        # Loop parameters
        self._read_freq = read_freq
        self._threshold = threshold
        self._timeout = timeout
        # Coalesce events option
        self._coalesce = False
        # set of str(raw_event), only used when coalesce option is True
        self._eventset = set()
    def append_event(self, event):
        """
        Append a raw event to the event queue.

        @param event: An event.
        @type event: _RawEvent instance.
        """
        self._eventq.append(event)
    def proc_fun(self):
        """Return the default processing functor."""
        return self._default_proc_fun
    def coalesce_events(self, coalesce=True):
        """
        Coalescing events. If this option is enabled, events received from
        inotify are filtered on their unicity within a batch: only unique
        events (same wd, mask, cookie, name) are enqueued, doublons are
        discarded. After a batch is processed, any event is accepted again.

        @param coalesce: Optional new coalescing value. True by default.
        @type coalesce: Bool
        """
        self._coalesce = coalesce
        if not coalesce:
            self._eventset.clear()
    def check_events(self, timeout=None):
        """
        Check for new events available to read, blocks up to timeout
        milliseconds.

        @param timeout: If specified it overrides the corresponding
                        instance attribute _timeout (in milliseconds).
        @type timeout: int
        @return: New events to read.
        @rtype: bool
        """
        while True:
            try:
                # blocks up to 'timeout' milliseconds
                if timeout is None:
                    timeout = self._timeout
                ret = self._pollobj.poll(timeout)
            except select.error as err:
                if err.args[0] == errno.EINTR:
                    continue # interrupted, retry
                else:
                    raise
            else:
                break
        # A readable pipe end means ThreadedNotifier.stop() was called, not
        # that inotify events are pending.
        if not ret or (self._pipe[0] == ret[0][0]):
            return False
        # only one fd is polled
        return ret[0][1] & select.POLLIN
    def read_events(self):
        """
        Read events from the inotify file descriptor and append them to
        the internal event queue.
        """
        buf_ = array.array('i', [0])
        # get event queue size
        if fcntl.ioctl(self._fd, termios.FIONREAD, buf_, 1) == -1:
            return
        queue_size = buf_[0]
        if queue_size < self._threshold:
            log.debug('(fd: %d) %d bytes available to read but threshold is '
                      'fixed to %d bytes', self._fd, queue_size,
                      self._threshold)
            return
        try:
            # Read content from file
            r = os.read(self._fd, queue_size)
        except Exception as msg:
            raise NotifierError(msg)
        log.debug('Event queue size: %d', queue_size)
        rsum = 0  # counter
        while rsum < queue_size:
            # Fixed-size header of a struct inotify_event: wd, mask,
            # cookie, len (4 x 32 bits).
            s_size = 16
            # Retrieve wd, mask, cookie and fname_len
            wd, mask, cookie, fname_len = struct.unpack('iIII',
                                                        r[rsum:rsum+s_size])
            # Retrieve name
            bname, = struct.unpack('%ds' % fname_len,
                                   r[rsum + s_size:rsum + s_size + fname_len])
            # FIXME: should we explictly call sys.getdefaultencoding() here ??
            uname = bname.decode()
            rawevent = _RawEvent(wd, mask, cookie, uname)
            if self._coalesce:
                # Only enqueue new (unique) events.
                raweventstr = str(rawevent)
                if raweventstr not in self._eventset:
                    self._eventset.add(raweventstr)
                    self._eventq.append(rawevent)
            else:
                self._eventq.append(rawevent)
            rsum += s_size + fname_len
    def process_events(self):
        """
        Routine for processing events from the queue by calling their
        associated processing functions (instances of ProcessEvent).
        """
        while self._eventq:
            raw_event = self._eventq.popleft()  # pop next event
            watch_ = self._watch_manager.get_watch(raw_event.wd)
            if (watch_ is None) and not (raw_event.mask & IN_Q_OVERFLOW):
                if not (raw_event.mask & IN_IGNORED):
                    # Not really sure how we ended up here, nor how we should
                    # handle these types of events and if it is appropriate to
                    # completly skip them (like we are doing here).
                    log.warning("Unable to retrieve Watch object associated to %s",
                                repr(raw_event))
                continue
            revent = self._sys_proc_fun(raw_event)  # system processings
            if watch_ and watch_.proc_fun:
                watch_.proc_fun(revent)  # user processings
            else:
                self._default_proc_fun(revent)
        self._sys_proc_fun.cleanup()  # remove olds MOVED_* events records
        if self._coalesce:
            self._eventset.clear()
    def __daemonize(self, pid_file=None, stdin=os.devnull, stdout=os.devnull,
                    stderr=os.devnull):
        """
        pid_file: file where the pid will be written. If pid_file=None the
                  pid is written to /var/run/<sys.argv[0]>.pid; if
                  pid_file=False no pid_file is written.
        stdin, stdout, stderr: files associated with common streams.
        """
        if pid_file is None:
            dirname = '/var/run/'
            basename = os.path.basename(sys.argv[0]) or 'pyinotify'
            pid_file = os.path.join(dirname, basename + '.pid')
        if pid_file != False and os.path.lexists(pid_file):
            err = 'Cannot daemonize: pid file %s already exists.' % pid_file
            raise NotifierError(err)
        def fork_daemon():
            # Adapted from Chad J. Schroeder's recipe
            # @see http://code.activestate.com/recipes/278731/
            pid = os.fork()
            if (pid == 0):
                # parent 2
                os.setsid()
                pid = os.fork()
                if (pid == 0):
                    # child: detach from any tty and loosen the umask.
                    os.chdir('/')
                    os.umask(0o022)
                else:
                    # parent 2
                    os._exit(0)
            else:
                # parent 1
                os._exit(0)
            # Rebind the standard streams onto the provided targets.
            fd_inp = os.open(stdin, os.O_RDONLY)
            os.dup2(fd_inp, 0)
            fd_out = os.open(stdout, os.O_WRONLY|os.O_CREAT, 0o0600)
            os.dup2(fd_out, 1)
            fd_err = os.open(stderr, os.O_WRONLY|os.O_CREAT, 0o0600)
            os.dup2(fd_err, 2)
        fork_daemon()
        # Write pid file. O_EXCL + O_NOFOLLOW guard against clobbering.
        if pid_file != False:
            flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
            fd_pid = os.open(pid_file, flags, 0o0600)
            os.write(fd_pid, bytes(str(os.getpid()) + '\n',
                                   locale.getpreferredencoding()))
            os.close(fd_pid)
            # Register the removal of the pid file at exit.
            atexit.register(lambda : os.unlink(pid_file))
    def _sleep(self, ref_time):
        # Optionally sleep the remainder of the read_freq period.
        if self._read_freq > 0:
            cur_time = time.time()
            sleep_amount = self._read_freq - (cur_time - ref_time)
            if sleep_amount > 0:
                log.debug('Now sleeping %d seconds', sleep_amount)
                time.sleep(sleep_amount)
    def loop(self, callback=None, daemonize=False, **args):
        """
        Events are read only one time every min(read_freq, timeout)
        seconds at best and only if the size to read is >= threshold.
        After this method returns it is safe to instantiate a new Notifier.

        @param callback: Functor called after each event processing
                         iteration. Expects to receive the notifier object
                         (self) as first parameter. If this function returns
                         True the loop is immediately terminated.
        @type callback: callable object or function
        @param daemonize: This thread is daemonized if set to True.
        @type daemonize: boolean
        @param args: Optional and relevant only if daemonize is True. Remaining
                     keyworded arguments are directly passed to daemonize see
                     __daemonize() method.
        @type args: various
        """
        if daemonize:
            self.__daemonize(**args)
        # Read and process events forever (until KeyboardInterrupt or
        # callback returning True).
        while 1:
            try:
                self.process_events()
                if (callback is not None) and (callback(self) is True):
                    break
                ref_time = time.time()
                # check_events is blocking
                if self.check_events():
                    self._sleep(ref_time)
                    self.read_events()
            except KeyboardInterrupt:
                # Stop monitoring if sigint is caught (Control-C).
                log.debug('Pyinotify stops monitoring.')
                break
        # Close internals
        self.stop()
    def stop(self):
        """
        Close inotify's instance (close its file descriptor).
        It destroys all existing watches, pending events,...
        This method is automatically called at the end of loop().
        """
        self._pollobj.unregister(self._fd)
        os.close(self._fd)
class ThreadedNotifier(threading.Thread, Notifier):
    """
    This notifier inherits from threading.Thread for instantiating a
    separate thread, and also inherits from Notifier, because it is a
    threaded notifier.

    Note that every functionality provided by this class is also provided
    through Notifier class. Moreover Notifier should be considered first
    because it is not threaded and could be easily daemonized.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None):
        """
        Initialization, initialize base classes. read_freq, threshold and
        timeout parameters are used when looping (see Notifier.__init__
        for their meaning).
        """
        # Init threading base class
        threading.Thread.__init__(self)
        # Stop condition
        self._stop_event = threading.Event()
        # Init Notifier base class
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        # Create a new pipe used for thread termination: writing to it
        # wakes up the poll() in check_events().
        self._pipe = os.pipe()
        self._pollobj.register(self._pipe[0], select.POLLIN)
    def stop(self):
        """
        Stop notifier's loop. Stop notification. Join the thread.
        """
        self._stop_event.set()
        # Wake up the poll() so the loop notices the stop request.
        os.write(self._pipe[1], b'stop')
        threading.Thread.join(self)
        Notifier.stop(self)
        self._pollobj.unregister(self._pipe[0])
        os.close(self._pipe[0])
        os.close(self._pipe[1])
    def loop(self):
        """
        Thread's main loop. Don't meant to be called by user directly.
        Call inherited start() method instead.
        """
        # Run until self._stop_event is set, which puts an end to the
        # thread's execution.
        # Fix: use is_set() -- the camelCase alias isSet() is deprecated
        # since Python 3.10.
        while not self._stop_event.is_set():
            self.process_events()
            ref_time = time.time()
            if self.check_events():
                self._sleep(ref_time)
                self.read_events()
    def run(self):
        """
        Start thread's loop: read and process events until the method
        stop() is called.
        Don't call this method directly, instead call the start() method
        inherited from threading.Thread, which then will call run() in
        its turn.
        """
        self.loop()
class AsyncNotifier(asyncore.file_dispatcher, Notifier):
    """
    This notifier inherits from asyncore.file_dispatcher in order to be
    able to use pyinotify along with the asyncore framework.

    NOTE(review): asyncore was deprecated in Python 3.6 and removed in
    Python 3.12 -- confirm the supported interpreter range before relying
    on this class.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None, channel_map=None):
        """
        Initializes the async notifier. The only additional parameter is
        'channel_map' which is the optional asyncore private map. See
        Notifier class for the meaning of the other parameters.
        """
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        asyncore.file_dispatcher.__init__(self, self._fd, channel_map)
    def handle_read(self):
        """
        When asyncore tells us we can read from the fd, we proceed
        processing events.
        """
        self.read_events()
        self.process_events()
class TornadoAsyncNotifier(Notifier):
    """
    Tornado ioloop adapter: registers the inotify fd on the provided
    ioloop and processes events from its read handler.
    """
    def __init__(self, watch_manager, ioloop, callback=None,
                 default_proc_fun=None, read_freq=0, threshold=0, timeout=None,
                 channel_map=None):
        """
        Note that if later you must call ioloop.close() be sure to let the
        default parameter to all_fds=False.

        @param ioloop: Tornado's IO loop.
        @param callback: Functor called at the end of each call to
                         handle_read (IO loop's read handler). Expects to
                         receive the notifier object (self) as single
                         parameter.
        """
        self.io_loop = ioloop
        self.handle_read_callback = callback
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        ioloop.add_handler(self._fd, self.handle_read, ioloop.READ)
    def handle_read(self, *args, **kwargs):
        """
        See comment in AsyncNotifier.
        """
        self.read_events()
        self.process_events()
        if self.handle_read_callback is not None:
            self.handle_read_callback(self)
class Watch:
    """
    Represent a watch, i.e. a file or directory being watched.
    """
    __slots__ = ('wd', 'path', 'mask', 'proc_fun', 'auto_add',
                 'exclude_filter', 'dir')
    def __init__(self, wd, path, mask, proc_fun, auto_add, exclude_filter):
        """
        Initializations.

        @param wd: Watch descriptor.
        @type wd: int
        @param path: Path of the file or directory being watched.
        @type path: str
        @param mask: Mask.
        @type mask: int
        @param proc_fun: Processing callable object.
        @type proc_fun: callable or None
        @param auto_add: Automatically add watches on new directories.
        @type auto_add: bool
        @param exclude_filter: Boolean function, used to exclude new
                               directories from being automatically watched.
        @type exclude_filter: callable object
        """
        self.wd = wd
        self.path = path
        self.mask = mask
        self.proc_fun = proc_fun
        self.auto_add = auto_add
        self.exclude_filter = exclude_filter
        # Snapshot whether the target currently is a directory.
        self.dir = os.path.isdir(self.path)
    def __repr__(self):
        """
        @return: String representation.
        @rtype: str
        """
        pairs = []
        for attr in self.__slots__:
            if attr.startswith('_'):
                continue
            pairs.append('%s%s%s' % (output_format.field_name(attr),
                                     output_format.punctuation('='),
                                     output_format.field_value(getattr(self,
                                                                       attr))))
        return '%s%s %s %s' % (output_format.punctuation('<'),
                               output_format.class_name(self.__class__.__name__),
                               ' '.join(pairs),
                               output_format.punctuation('>'))
class ExcludeFilter:
    """
    ExcludeFilter is an exclusion filter.

    An instance is a callable: it returns True when the given path matches
    at least one of the configured regular expressions (i.e. the path must
    be excluded), and False otherwise.
    """
    def __init__(self, arg_lst):
        """
        Examples:
          - To exclude a specific path: "^/etc/apache$"
          - To exclude every path under /etc: "^/etc/.*"

        @param arg_lst: is either a list of regex patterns or the name of
                        a file from which patterns are loaded (one per
                        line; blank lines and '#' comments are ignored).
        @type arg_lst: list of str or str
        @raise TypeError: if arg_lst is neither a str nor a list.
        """
        if isinstance(arg_lst, str):
            lst = self._load_patterns_from_file(arg_lst)
        elif isinstance(arg_lst, list):
            lst = arg_lst
        else:
            # Fail loudly with a helpful message instead of a bare TypeError.
            raise TypeError('ExcludeFilter expects a list of patterns or a '
                            'filename, got %r' % type(arg_lst).__name__)
        self._lregex = [re.compile(regex, re.UNICODE) for regex in lst]
    def _load_patterns_from_file(self, filename):
        """Read one pattern per line from |filename|; skip blank lines and
        lines starting with '#'."""
        lst = []
        with open(filename, 'r') as file_obj:
            # Iterate the file object directly instead of materializing
            # every line with readlines().
            for line in file_obj:
                pattern = line.strip()
                if not pattern or pattern.startswith('#'):
                    continue
                lst.append(pattern)
        return lst
    def _match(self, regex, path):
        # Anchored at the start of the path (re.match semantics).
        return regex.match(path) is not None
    def __call__(self, path):
        """
        @param path: Path to match against the configured regexps.
        @type path: str
        @return: True if path has been matched and should be excluded,
                 False otherwise.
        @rtype: bool
        """
        return any(self._match(regex, path) for regex in self._lregex)
class WatchManagerError(Exception):
    """
    WatchManager Exception. Raised on error encountered on watches
    operations.
    """
    def __init__(self, msg, wmd):
        """
        @param msg: Exception string's description.
        @type msg: string
        @param wmd: This dictionary contains the wd assigned to paths of
                    the same call for which watches were successfully
                    added.
        @type wmd: dict of int
        """
        self.wmd = wmd
        super().__init__(msg)
class WatchManager:
    """
    Provide operations for watching files and directories. Its internal
    dictionary is used to reference watched items. When used inside
    threaded code, one must instantiate as many WatchManager instances as
    there are ThreadedNotifier instances.
    """
    def __init__(self, exclude_filter=lambda path: False):
        """
        @param exclude_filter: boolean function, returns True if the
                               current path must be excluded from being
                               watched. Convenient for providing a common
                               exclusion filter for every add_watch().
        @type exclude_filter: callable object
        @raise InotifyBindingNotFoundError: if no inotify binding is found.
        @raise OSError: if inotify cannot be initialized.
        """
        self._exclude_filter = exclude_filter
        self._wmd = {}  # watch dict key: watch descriptor, value: watch
        self._inotify_wrapper = INotifyWrapper.create()
        if self._inotify_wrapper is None:
            raise InotifyBindingNotFoundError()
        self._fd = self._inotify_wrapper.inotify_init()  # inotify's fd
        if self._fd < 0:
            err = 'Cannot initialize new instance of inotify, %s'
            raise OSError(err % self._inotify_wrapper.str_errno())
    def close(self):
        """
        Close inotify's file descriptor, this action will also
        automatically remove (i.e. stop watching) all its associated
        watch descriptors.
        """
        os.close(self._fd)
    def get_fd(self):
        """
        Return assigned inotify's file descriptor.

        @return: File descriptor.
        @rtype: int
        """
        return self._fd
    def get_watch(self, wd):
        """
        Get watch from provided watch descriptor wd.

        @param wd: Watch descriptor.
        @type wd: int
        @return: Watch or None if wd is unknown.
        """
        return self._wmd.get(wd)
    def del_watch(self, wd):
        """
        Remove watch entry associated to watch descriptor wd.

        @param wd: Watch descriptor.
        @type wd: int
        """
        try:
            del self._wmd[wd]
        except KeyError as err:
            log.error('Cannot delete unknown watch descriptor %s' % str(err))
    @property
    def watches(self):
        """
        Get a reference on the internal watch manager dictionary.

        @return: Internal watch manager dictionary.
        @rtype: dict
        """
        return self._wmd
    def __format_path(self, path):
        """
        Format path to its internal (stored in watch manager)
        representation.
        """
        return os.path.normpath(path)
    def __add_watch(self, path, mask, proc_fun, auto_add, exclude_filter):
        """
        Add a watch on path, build a Watch object and insert it in the
        watch manager dictionary. Return the wd value.
        """
        path = self.__format_path(path)
        # auto_add needs IN_CREATE to detect new sub-directories.
        if auto_add and not mask & IN_CREATE:
            mask |= IN_CREATE
        wd = self._inotify_wrapper.inotify_add_watch(self._fd, path, mask)
        if wd < 0:
            return wd
        watch = Watch(wd=wd, path=path, mask=mask, proc_fun=proc_fun,
                      auto_add=auto_add, exclude_filter=exclude_filter)
        self._wmd[wd] = watch
        log.debug('New %s', watch)
        return wd
    def __glob(self, path, do_glob):
        # Expand wildcards in 'path' only when requested.
        if do_glob:
            return glob.iglob(path)
        else:
            return [path]
    def add_watch(self, path, mask, proc_fun=None, rec=False,
                  auto_add=False, do_glob=False, quiet=True,
                  exclude_filter=None):
        """
        Add watch(s) on the provided path(s) with associated mask flag
        value and optionally with a processing function and recursive
        flag.

        @param path: Path to watch, the path can either be a file or a
                     directory. Also accepts a sequence (list) of paths.
        @type path: string or list of strings
        @param mask: Bitmask of events.
        @type mask: int
        @param proc_fun: Processing object.
        @type proc_fun: function or ProcessEvent instance or instance of
                        one of its subclass or callable object.
        @param rec: Recursively add watches from path on all its
                    subdirectories, set to False by default (doesn't
                    follow symlinks in any case).
        @type rec: bool
        @param auto_add: Automatically add watches on newly created
                         directories in watched parent path directory.
        @type auto_add: bool
        @param do_glob: Do globbing on pathname (expand ~, wildcards).
        @type do_glob: bool
        @param quiet: if False raises a WatchManagerError exception on
                      error. See example not_quiet.py.
        @type quiet: bool
        @param exclude_filter: predicate (boolean function), which returns
                               True if the current path must be excluded
                               from being watched. This argument has
                               precedence over exclude_filter passed to
                               the class' constructor.
        @type exclude_filter: callable object
        @return: dict of paths associated to watch descriptors. A wd value
                 is positive when the watch is added successfully,
                 otherwise the value is negative. If the path is invalid
                 or was excluded it is not included into this returned
                 dictionary.
        @rtype: dict of {str: int}
        """
        ret_ = {} # return {path: wd, ...}
        if exclude_filter is None:
            exclude_filter = self._exclude_filter
        # normalize args as list elements
        for npath in self.__format_param(path):
            # Require that path be a unicode string
            if not isinstance(npath, str):
                ret_[path] = -3
                continue
            # unix pathname pattern expansion
            for apath in self.__glob(npath, do_glob):
                # recursively list subdirs according to rec param
                for rpath in self.__walk_rec(apath, rec):
                    if not exclude_filter(rpath):
                        wd = ret_[rpath] = self.__add_watch(rpath, mask,
                                                            proc_fun,
                                                            auto_add,
                                                            exclude_filter)
                        if wd < 0:
                            err = ('add_watch: cannot watch %s WD=%d, %s' % \
                                       (rpath, wd,
                                        self._inotify_wrapper.str_errno()))
                            if quiet:
                                log.error(err)
                            else:
                                raise WatchManagerError(err, ret_)
                    else:
                        # Let's say -2 means 'explicitly excluded
                        # from watching'.
                        ret_[rpath] = -2
        return ret_
    def __get_sub_rec(self, lpath):
        """
        Yield each watch descriptor of lpath plus, for directories, the
        descriptors of every watched item located underneath them.
        """
        for d in lpath:
            root = self.get_path(d)
            if root is not None:
                # always keep root
                yield d
            else:
                # if invalid
                continue
            # nothing else to expect
            if not os.path.isdir(root):
                continue
            # normalization
            root = os.path.normpath(root)
            # recursion
            lend = len(root)
            for iwd in self._wmd.items():
                cur = iwd[1].path
                pref = os.path.commonprefix([root, cur])
                if root == os.sep or (len(pref) == lend and \
                                      len(cur) > lend and \
                                      cur[lend] == os.sep):
                    yield iwd[1].wd
    def update_watch(self, wd, mask=None, proc_fun=None, rec=False,
                     auto_add=False, quiet=True):
        """
        Update existing watch descriptors wd. The mask must be changed and
        proc_fun and auto_add can be changed too.

        @param wd: Watch Descriptor to update. Also accepts a list of
                   watch descriptors.
        @type wd: int or list of int
        @param mask: Optional new bitmask of events.
        @type mask: int
        @param proc_fun: Optional new processing function.
        @type proc_fun: function or ProcessEvent instance or instance of
                        one of its subclass or callable object.
        @param rec: Optionally adds watches recursively on all
                    subdirectories contained into wd directory.
        @type rec: bool
        @param auto_add: Automatically adds watches on newly created
                         directories in the watch's path corresponding to
                         wd.
        @type auto_add: bool
        @param quiet: If False raises a WatchManagerError exception on
                      error. See example not_quiet.py
        @type quiet: bool
        @return: dict of watch descriptors associated to booleans values.
                 True if the corresponding wd has been successfully
                 updated, False otherwise.
        @rtype: dict of {int: bool}
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)
        ret_ = {}  # return {wd: bool, ...}
        for awd in lwd:
            apath = self.get_path(awd)
            if not apath or awd < 0:
                err = 'update_watch: invalid WD=%d' % awd
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)
            if mask:
                # Re-adding an existing path updates its mask and returns
                # the same wd.
                wd_ = self._inotify_wrapper.inotify_add_watch(self._fd, apath,
                                                              mask)
                if wd_ < 0:
                    ret_[awd] = False
                    err = ('update_watch: cannot update %s WD=%d, %s' % \
                               (apath, wd_, self._inotify_wrapper.str_errno()))
                    if quiet:
                        log.error(err)
                        continue
                    raise WatchManagerError(err, ret_)
                assert(awd == wd_)
            if proc_fun or auto_add:
                watch_ = self._wmd[awd]
            if proc_fun:
                watch_.proc_fun = proc_fun
            if auto_add:
                watch_.auto_add = auto_add
            ret_[awd] = True
            log.debug('Updated watch - %s', self._wmd[awd])
        return ret_
    def __format_param(self, param):
        """
        @param param: Parameter.
        @type param: string or int
        @return: wrap param.
        @rtype: list of type(param)
        """
        if isinstance(param, list):
            for p_ in param:
                yield p_
        else:
            yield param
    def get_wd(self, path):
        """
        Returns the watch descriptor associated to path. This method
        presents a prohibitive cost, always prefer to keep the WD
        returned by add_watch(). If the path is unknown it returns None.

        @param path: Path.
        @type path: str
        @return: WD or None.
        @rtype: int or None
        """
        path = self.__format_path(path)
        for iwd in self._wmd.items():
            if iwd[1].path == path:
                return iwd[0]
    def get_path(self, wd):
        """
        Returns the path associated to WD, if WD is unknown it returns
        None.

        @param wd: Watch descriptor.
        @type wd: int
        @return: Path or None.
        @rtype: string or None
        """
        watch_ = self._wmd.get(wd)
        if watch_ is not None:
            return watch_.path
    def __walk_rec(self, top, rec):
        """
        Yields each subdirectories of top, doesn't follow symlinks.
        If rec is false, only yield top.

        @param top: root directory.
        @type top: string
        @param rec: recursive flag.
        @type rec: bool
        @return: path of one subdirectory.
        @rtype: string
        """
        if not rec or os.path.islink(top) or not os.path.isdir(top):
            yield top
        else:
            for root, dirs, files in os.walk(top):
                yield root
    def rm_watch(self, wd, rec=False, quiet=True):
        """
        Removes watch(s).

        @param wd: Watch Descriptor of the file or directory to unwatch.
                   Also accepts a list of WDs.
        @type wd: int or list of int.
        @param rec: Recursively removes watches on every already watched
                    subdirectories and subfiles.
        @type rec: bool
        @param quiet: If False raises a WatchManagerError exception on
                      error. See example not_quiet.py
        @type quiet: bool
        @return: dict of watch descriptors associated to booleans values.
                 True if the corresponding wd has been successfully
                 removed, False otherwise.
        @rtype: dict of {int: bool}
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)
        ret_ = {}  # return {wd: bool, ...}
        for awd in lwd:
            # remove watch
            wd_ = self._inotify_wrapper.inotify_rm_watch(self._fd, awd)
            if wd_ < 0:
                ret_[awd] = False
                err = ('rm_watch: cannot remove WD=%d, %s' % \
                           (awd, self._inotify_wrapper.str_errno()))
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)
            # Remove watch from our dictionary
            if awd in self._wmd:
                del self._wmd[awd]
            ret_[awd] = True
            log.debug('Watch WD=%d (%s) removed', awd, self.get_path(awd))
        return ret_
    def watch_transient_file(self, filename, mask, proc_class):
        """
        Watch a transient file, which will be created and deleted
        frequently over time (e.g. pid file). The strategy is to watch the
        parent directory and filter events by the file's basename.

        @param filename: Filename.
        @type filename: string
        @param mask: Bitmask of events, should contain IN_CREATE and
                     IN_DELETE.
        @type mask: int
        @param proc_class: ProcessEvent (or of one of its subclass), beware
                           of accepting a ProcessEvent's instance as argument
                           into __init__, see transient_file.py example for
                           more details.
        @type proc_class: ProcessEvent's instance or of one of its
                          subclasses.
        @return: Same as add_watch().
        @rtype: Same as add_watch().
        """
        dirname = os.path.dirname(filename)
        if dirname == '':
            return {}  # Maintains coherence with add_watch()
        basename = os.path.basename(filename)
        # Assuming we are watching at least for IN_CREATE and IN_DELETE
        mask |= IN_CREATE | IN_DELETE
        def cmp_name(event):
            # Only chain for events raised against our exact basename.
            if getattr(event, 'name') is None:
                return False
            return basename == event.name
        return self.add_watch(dirname, mask,
                              proc_fun=proc_class(ChainIfTrue(func=cmp_name)),
                              rec=False,
                              auto_add=False, do_glob=False,
                              exclude_filter=lambda path: False)
class RawOutputFormat:
    """
    Format string representations. This raw variant has an empty format
    table by default, so every helper simply returns the text unchanged.
    """
    def __init__(self, format=None):
        # attribute name -> escape sequence (empty table = no decoration).
        self.format = format or {}
    def simple(self, s, attribute):
        """Wrap |s| with the escape sequence registered for |attribute|
        (if any), always resetting to 'normal' afterwards."""
        if not isinstance(s, str):
            s = str(s)
        prefix = self.format.get(attribute, '')
        suffix = self.format.get('normal', '')
        return prefix + s + suffix
    def punctuation(self, s):
        """Punctuation color."""
        return self.simple(s, 'normal')
    def field_value(self, s):
        """Field value color."""
        return self.simple(s, 'purple')
    def field_name(self, s):
        """Field name color."""
        return self.simple(s, 'blue')
    def class_name(self, s):
        """Class name color."""
        return self.format.get('red', '') + self.simple(s, 'bold')
# Module-level formatter, swapped for ColoredOutputFormat by the CLI.
output_format = RawOutputFormat()
class ColoredOutputFormat(RawOutputFormat):
    """
    Format colored string representations using ANSI terminal escape
    sequences.
    """
    def __init__(self):
        # attribute name -> ANSI SGR parameter number.
        sgr = {'normal': 0, 'black': 30, 'red': 31, 'green': 32,
               'yellow': 33, 'blue': 34, 'purple': 35, 'cyan': 36,
               'bold': 1, 'uline': 4, 'blink': 5, 'invert': 7}
        codes = dict((name, '\033[%dm' % num) for name, num in sgr.items())
        RawOutputFormat.__init__(self, codes)
def compatibility_mode():
    """
    Use this function to turn on the compatibility mode. The compatibility
    mode is used to improve compatibility with Pyinotify 0.7.1 (or older)
    programs: it copies ALL_EVENTS and every module-level IN_* constant
    onto the EventsCodes class, and sets the global COMPATIBILITY_MODE flag
    (which makes events also expose 'is_dir' and 'event_name'). Do not call
    this function from new programs.
    """
    setattr(EventsCodes, 'ALL_EVENTS', ALL_EVENTS)
    for evname in globals():
        if evname.startswith('IN_'):
            setattr(EventsCodes, evname, globals()[evname])
    global COMPATIBILITY_MODE
    COMPATIBILITY_MODE = True
def command_line():
    """
    By default the watched path is '/tmp' and all types of events are
    monitored. Events monitoring serves forever, type c^c to stop it.
    """
    from optparse import OptionParser
    usage = "usage: %prog [options] [path1] [path2] [pathn]"
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", help="Verbose mode")
    parser.add_option("-r", "--recursive", action="store_true",
                      dest="recursive",
                      help="Add watches recursively on paths")
    parser.add_option("-a", "--auto_add", action="store_true",
                      dest="auto_add",
                      help="Automatically add watches on new directories")
    parser.add_option("-e", "--events-list", metavar="EVENT[,...]",
                      dest="events_list",
                      help=("A comma-separated list of events to watch for - "
                            "see the documentation for valid options (defaults"
                            " to everything)"))
    parser.add_option("-s", "--stats", action="store_true",
                      dest="stats",
                      help="Display dummy statistics")
    parser.add_option("-V", "--version", action="store_true",
                      dest="version", help="Pyinotify version")
    parser.add_option("-f", "--raw-format", action="store_true",
                      dest="raw_format",
                      help="Disable enhanced output format.")
    parser.add_option("-c", "--command", action="store",
                      dest="command",
                      help="Shell command to run upon event")
    (options, args) = parser.parse_args()
    if options.verbose:
        # 10 == logging.DEBUG
        log.setLevel(10)
    if options.version:
        print(__version__)
    if not options.raw_format:
        global output_format
        output_format = ColoredOutputFormat()
    if len(args) < 1:
        path = '/tmp'  # default watched path
    else:
        path = args
    # watch manager instance
    wm = WatchManager()
    # notifier instance and init
    if options.stats:
        notifier = Notifier(wm, default_proc_fun=Stats(), read_freq=5)
    else:
        notifier = Notifier(wm, default_proc_fun=PrintAllEvents())
    # What mask to apply
    mask = 0
    if options.events_list:
        events_list = options.events_list.split(',')
        for ev in events_list:
            evcode = EventsCodes.ALL_FLAGS.get(ev, 0)
            if evcode:
                mask |= evcode
            else:
                parser.error("The event '%s' specified with option -e"
                             " is not valid" % ev)
    else:
        mask = ALL_EVENTS
    # stats
    cb_fun = None
    if options.stats:
        def cb(s):
            # Print both the one-line summary and the bar chart.
            sys.stdout.write(repr(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.write(str(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.flush()
        cb_fun = cb
    # External command
    if options.command:
        # NOTE(review): -c overrides the stats callback when both are given,
        # and the command is run through the shell without arguments from
        # the event -- confirm this is intended.
        def cb(s):
            subprocess.Popen(options.command, shell=True)
        cb_fun = cb
    log.debug('Start monitoring %s, (press c^c to halt pyinotify)' % path)
    wm.add_watch(path, mask, rec=options.recursive, auto_add=options.auto_add)
    # Loop forever (until sigint signal get caught)
    notifier.loop(callback=cb_fun)
if __name__ == '__main__':
    # Run the command-line interface when executed as a script.
    command_line()
| true | true |
f72fec409082a747247e54f3160b84531dff3bf0 | 49 | py | Python | pydotted/__init__.py | aredden/pydotted | 62ad1d3eaccc65edc94b3cf4a0673ad089a29c6a | [
"MIT"
] | null | null | null | pydotted/__init__.py | aredden/pydotted | 62ad1d3eaccc65edc94b3cf4a0673ad089a29c6a | [
"MIT"
] | null | null | null | pydotted/__init__.py | aredden/pydotted | 62ad1d3eaccc65edc94b3cf4a0673ad089a29c6a | [
"MIT"
] | null | null | null | from .pydotted import pydot
__ALL__ = ["pydot"]
| 12.25 | 27 | 0.714286 | from .pydotted import pydot
__ALL__ = ["pydot"]
| true | true |
f72fed24b1aa083de6ed1211270c3ee51f07a93e | 5,502 | py | Python | custom_components/panasonic_smart_app/sensor.py | sugoi-wada/panasonic_smart_app | 78c3e377165b93c415108fa21137067585cfc72d | [
"MIT"
] | null | null | null | custom_components/panasonic_smart_app/sensor.py | sugoi-wada/panasonic_smart_app | 78c3e377165b93c415108fa21137067585cfc72d | [
"MIT"
] | null | null | null | custom_components/panasonic_smart_app/sensor.py | sugoi-wada/panasonic_smart_app | 78c3e377165b93c415108fa21137067585cfc72d | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
import logging
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
STATE_UNAVAILABLE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_PM25,
TEMP_CELSIUS,
ENERGY_KILO_WATT_HOUR,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
PERCENTAGE,
)
from .entity import PanasonicBaseEntity
from .const import (
DOMAIN,
DEVICE_TYPE_DEHUMIDIFIER,
DEVICE_TYPE_AC,
DATA_CLIENT,
DATA_COORDINATOR,
LABEL_PM25,
LABEL_HUMIDITY,
LABEL_OUTDOOR_TEMPERATURE,
LABEL_ENERGY,
ICON_PM25,
ICON_THERMOMETER,
ICON_HUMIDITY,
ICON_ENERGY,
STATE_MEASUREMENT,
STATE_TOTAL_INCREASING,
)
_LOGGER = logging.getLogger(__package__)
async def async_setup_entry(hass, entry, async_add_entities) -> bool:
    """Set up Panasonic Smart App sensors from a config entry.

    Every device gets an energy sensor; dehumidifiers additionally get
    humidity and PM2.5 sensors, and air conditioners an outdoor
    temperature sensor.
    """
    entry_data = hass.data[DOMAIN][entry.entry_id]
    client = entry_data[DATA_CLIENT]
    coordinator = entry_data[DATA_COORDINATOR]
    sensors = []
    for index, device in enumerate(coordinator.data):
        device_type = int(device.get("DeviceType"))
        # Every device type reports energy consumption.
        sensors.append(
            PanasonicEnergySensor(coordinator, index, client, device)
        )
        if device_type == DEVICE_TYPE_DEHUMIDIFIER:
            sensors.append(
                PanasonicHumiditySensor(coordinator, index, client, device)
            )
            sensors.append(
                PanasonicPM25Sensor(coordinator, index, client, device)
            )
        if device_type == DEVICE_TYPE_AC:
            sensors.append(
                PanasonicOutdoorTemperatureSensor(
                    coordinator, index, client, device
                )
            )
    async_add_entities(sensors, True)
    return True
class PanasonicHumiditySensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic dehumidifier current humidity sensor """
    @property
    def label(self):
        # Entity display name.
        return f"{self.nickname} {LABEL_HUMIDITY}"
    @property
    def icon(self) -> str:
        return ICON_HUMIDITY
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_HUMIDITY
    @property
    def state(self) -> int:
        """Current relative humidity (device command "0x07")."""
        status = self.coordinator.data[self.index]["status"]
        _current_humd = status.get("0x07", None)
        _LOGGER.debug(f"[{self.label}] state: {_current_humd}")
        # Explicit None check: the previous truthiness test made a
        # legitimate reading of 0 show up as unavailable.
        return _current_humd if _current_humd is not None else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return PERCENTAGE
class PanasonicPM25Sensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic dehumidifer PM2.5 sensor """
    @property
    def label(self) -> str:
        # Entity display name.
        return f"{self.nickname} {LABEL_PM25}"
    @property
    def icon(self) -> str:
        return ICON_PM25
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_PM25
    @property
    def state(self) -> int:
        # Command "0x53" carries the PM2.5 reading; -1 is used as the
        # "missing" sentinel (concentrations cannot be negative).
        status = self.coordinator.data[self.index]["status"]
        _pm25 = float(status.get("0x53", -1))
        _LOGGER.debug(f"[{self.label}] state: {_pm25}")
        return _pm25 if _pm25 >= 0 else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
class PanasonicOutdoorTemperatureSensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic AC outdoor temperature sensor """
    @property
    def label(self) -> str:
        # Entity display name.
        return f"{self.nickname} {LABEL_OUTDOOR_TEMPERATURE}"
    @property
    def icon(self) -> str:
        return ICON_THERMOMETER
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_TEMPERATURE
    @property
    def state(self) -> int:
        """Outdoor temperature in °C (device command "0x21").

        Fix: the previous implementation reported any value below 0 as
        unavailable, discarding valid sub-zero temperatures. Only a
        missing "0x21" entry is now treated as unavailable.
        """
        status = self.coordinator.data[self.index]["status"]
        _raw = status.get("0x21", None)
        if _raw is None:
            _LOGGER.debug(f"[{self.label}] state: unavailable")
            return STATE_UNAVAILABLE
        _outdoor_temperature = float(_raw)
        _LOGGER.debug(f"[{self.label}] state: {_outdoor_temperature}")
        return _outdoor_temperature
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return TEMP_CELSIUS
class PanasonicEnergySensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic energy sensor """
    @property
    def label(self) -> str:
        # Entity display name.
        return f"{self.nickname} {LABEL_ENERGY}"
    @property
    def icon(self) -> str:
        return ICON_ENERGY
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_ENERGY
    @property
    def last_reset(self):
        # First day of the current month. NOTE(review): this keeps the
        # current time of day rather than midnight, and assumes the
        # upstream counter resets monthly -- confirm both.
        return datetime.today().replace(day=1)
    @property
    def state(self) -> int:
        # Accumulated consumption in kWh; values below 0 are reported as
        # unavailable (negative is used as a "no data" sentinel here).
        energy = self.coordinator.data[self.index]["energy"]
        _LOGGER.debug(f"[{self.label}] state: {energy}")
        return energy if energy >= 0 else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_TOTAL_INCREASING
    @property
    def unit_of_measurement(self) -> str:
        return ENERGY_KILO_WATT_HOUR
| 25.71028 | 87 | 0.62341 | from datetime import datetime, timedelta
import logging
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
STATE_UNAVAILABLE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_PM25,
TEMP_CELSIUS,
ENERGY_KILO_WATT_HOUR,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
PERCENTAGE,
)
from .entity import PanasonicBaseEntity
from .const import (
DOMAIN,
DEVICE_TYPE_DEHUMIDIFIER,
DEVICE_TYPE_AC,
DATA_CLIENT,
DATA_COORDINATOR,
LABEL_PM25,
LABEL_HUMIDITY,
LABEL_OUTDOOR_TEMPERATURE,
LABEL_ENERGY,
ICON_PM25,
ICON_THERMOMETER,
ICON_HUMIDITY,
ICON_ENERGY,
STATE_MEASUREMENT,
STATE_TOTAL_INCREASING,
)
_LOGGER = logging.getLogger(__package__)
async def async_setup_entry(hass, entry, async_add_entities) -> bool:
    """Set up Panasonic sensor entities for a config entry.

    Every device gets an energy sensor; dehumidifiers additionally get
    humidity and PM2.5 sensors, and air conditioners an outdoor-temperature
    sensor.
    """
    # Client and coordinator were stored in hass.data by the integration setup.
    client = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
    coordinator = hass.data[DOMAIN][entry.entry_id][DATA_COORDINATOR]
    devices = coordinator.data
    sensors = []
    for index, device in enumerate(devices):
        device_type = int(device.get("DeviceType"))
        # All device types expose an energy reading.
        sensors.append(
            PanasonicEnergySensor(
                coordinator,
                index,
                client,
                device,
            )
        )
        if device_type == DEVICE_TYPE_DEHUMIDIFIER:
            sensors.append(
                PanasonicHumiditySensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
            sensors.append(
                PanasonicPM25Sensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
        if device_type == DEVICE_TYPE_AC:
            sensors.append(
                PanasonicOutdoorTemperatureSensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
    # True -> request an immediate state update for the new entities.
    async_add_entities(sensors, True)
    return True
class PanasonicHumiditySensor(PanasonicBaseEntity, SensorEntity):
    """Humidity sensor for Panasonic dehumidifier devices (status key 0x07)."""
    @property
    def label(self):
        """Friendly entity label shown in the UI."""
        return f"{self.nickname} {LABEL_HUMIDITY}"
    @property
    def icon(self) -> str:
        return ICON_HUMIDITY
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_HUMIDITY
    @property
    def state(self) -> int:
        # NOTE(review): annotated int but returns the raw status value (type
        # not visible here) or STATE_UNAVAILABLE. Also, a falsy reading
        # (e.g. numeric 0) would be reported as unavailable — confirm the
        # API never legitimately reports 0% humidity.
        status = self.coordinator.data[self.index]["status"]
        _current_humd = status.get("0x07", None)
        _LOGGER.debug(f"[{self.label}] state: {_current_humd}")
        return _current_humd if _current_humd else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return PERCENTAGE
class PanasonicPM25Sensor(PanasonicBaseEntity, SensorEntity):
    """PM2.5 sensor for Panasonic dehumidifier devices (status key 0x53)."""
    @property
    def label(self) -> str:
        """Friendly entity label shown in the UI."""
        return f"{self.nickname} {LABEL_PM25}"
    @property
    def icon(self) -> str:
        return ICON_PM25
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_PM25
    @property
    def state(self) -> "float | str":
        # Annotation corrected from int: returns the parsed float, or the
        # STATE_UNAVAILABLE string when the reading is negative (including
        # the -1 default used when the key is absent).
        status = self.coordinator.data[self.index]["status"]
        _pm25 = float(status.get("0x53", -1))
        _LOGGER.debug(f"[{self.label}] state: {_pm25}")
        return _pm25 if _pm25 >= 0 else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
class PanasonicOutdoorTemperatureSensor(PanasonicBaseEntity, SensorEntity):
@property
def label(self) -> str:
return f"{self.nickname} {LABEL_OUTDOOR_TEMPERATURE}"
@property
def icon(self) -> str:
return ICON_THERMOMETER
@property
def device_class(self) -> str:
return DEVICE_CLASS_TEMPERATURE
@property
def state(self) -> int:
status = self.coordinator.data[self.index]["status"]
_outdoor_temperature = float(status.get("0x21", -1))
_LOGGER.debug(f"[{self.label}] state: {_outdoor_temperature}")
return _outdoor_temperature if _outdoor_temperature >= 0 else STATE_UNAVAILABLE
@property
def state_class(self) -> str:
return STATE_MEASUREMENT
@property
def unit_of_measurement(self) -> str:
return TEMP_CELSIUS
class PanasonicEnergySensor(PanasonicBaseEntity, SensorEntity):
@property
def label(self) -> str:
return f"{self.nickname} {LABEL_ENERGY}"
@property
def icon(self) -> str:
return ICON_ENERGY
@property
def device_class(self) -> str:
return DEVICE_CLASS_ENERGY
@property
def last_reset(self):
return datetime.today().replace(day=1)
@property
def state(self) -> int:
energy = self.coordinator.data[self.index]["energy"]
_LOGGER.debug(f"[{self.label}] state: {energy}")
return energy if energy >= 0 else STATE_UNAVAILABLE
@property
def state_class(self) -> str:
return STATE_TOTAL_INCREASING
@property
def unit_of_measurement(self) -> str:
return ENERGY_KILO_WATT_HOUR
| true | true |
f72fed563a8c29934c97216b6cbba861286ec271 | 3,487 | py | Python | IPython/core/tests/test_prompts.py | flexlee/ipython | 7528fbd76073c90262b9ac127de57c4c59b23a5c | [
"BSD-3-Clause-Clear"
] | 1 | 2022-03-13T23:06:43.000Z | 2022-03-13T23:06:43.000Z | IPython/core/tests/test_prompts.py | andreasjansson/ipython | 09b4311726f46945b936c699f7a6489d74d7397f | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/core/tests/test_prompts.py | andreasjansson/ipython | 09b4311726f46945b936c699f7a6489d74d7397f | [
"BSD-3-Clause-Clear"
] | 1 | 2020-05-03T10:25:12.000Z | 2020-05-03T10:25:12.000Z | # -*- coding: utf-8
"""Tests for prompt generation."""
import unittest
import os
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager, LazyEvaluate
from IPython.testing.globalipapp import get_ipython
from IPython.utils import py3compat
from IPython.utils.tempdir import TemporaryDirectory
ip = get_ipython()
class PromptTests(unittest.TestCase):
    # NOTE(review): Python-2-only tests — they use `unicode` and
    # `os.getcwdu`, which do not exist on Python 3.
    def setUp(self):
        # Fresh PromptManager per test, bound to the module-level shell.
        self.pm = PromptManager(shell=ip, config=ip.config)
    def test_multiline_prompt(self):
        # Width is measured on the last line of a multi-line template.
        self.pm.in_template = "[In]\n>>>"
        self.pm.render('in')
        self.assertEqual(self.pm.width, 3)
        self.assertEqual(self.pm.txtwidth, 3)
        # Trailing newline -> empty last line -> zero width.
        self.pm.in_template = '[In]\n'
        self.pm.render('in')
        self.assertEqual(self.pm.width, 0)
        self.assertEqual(self.pm.txtwidth, 0)
    def test_translate_abbreviations(self):
        # Each bash-style abbreviation should expand to its format-string form.
        def do_translate(template):
            self.pm.in_template = template
            return self.pm.templates['in']
        pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
                 (r'\T', '{time}'),
                 (r'\n', '\n')
                ]
        tt.check_pairs(do_translate, pairs)
    def test_user_ns(self):
        # Template fields resolve against the interactive user namespace.
        self.pm.color_scheme = 'NoColor'
        ip.ex("foo='bar'")
        self.pm.in_template = "In [{foo}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u'In [bar]')
    def test_builtins(self):
        # Builtins are also visible to prompt templates.
        self.pm.color_scheme = 'NoColor'
        self.pm.in_template = "In [{int}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u"In [%r]" % int)
    def test_undefined(self):
        # Unknown names render an inline error rather than raising.
        self.pm.color_scheme = 'NoColor'
        self.pm.in_template = "In [{foo_dne}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u"In [<ERROR: 'foo_dne' not found>]")
    def test_render(self):
        # \# expands to the execution counter.
        self.pm.in_template = r'\#>'
        self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
    def test_render_unicode_cwd(self):
        # \w (cwd) must survive non-ASCII directory names.
        save = os.getcwdu()
        with TemporaryDirectory(u'ünicødé') as td:
            os.chdir(td)
            self.pm.in_template = r'\w [\#]'
            p = self.pm.render('in', color=False)
            self.assertEqual(p, u"%s [%i]" % (os.getcwdu(), ip.execution_count))
        os.chdir(save)
    def test_lazy_eval_unicode(self):
        u = u'ünicødé'
        lz = LazyEvaluate(lambda : u)
        # str(lz) would fail
        self.assertEqual(unicode(lz), u)
        self.assertEqual(format(lz), u)
    def test_lazy_eval_nonascii_bytes(self):
        u = u'ünicødé'
        b = u.encode('utf8')
        lz = LazyEvaluate(lambda : b)
        # unicode(lz) would fail
        self.assertEqual(str(lz), str(b))
        self.assertEqual(format(lz), str(b))
    def test_lazy_eval_float(self):
        # Format specs are forwarded to the lazily-computed value.
        f = 0.503
        lz = LazyEvaluate(lambda : f)
        self.assertEqual(str(lz), str(f))
        self.assertEqual(unicode(lz), unicode(f))
        self.assertEqual(format(lz), str(f))
        self.assertEqual(format(lz, '.1'), '0.5')
    @dec.skip_win32
    def test_cwd_x(self):
        # \X0 abbreviates the home directory to ~ (POSIX only).
        self.pm.in_template = r"\X0"
        save = os.getcwdu()
        os.chdir(os.path.expanduser('~'))
        p = self.pm.render('in', color=False)
        try:
            self.assertEqual(p, '~')
        finally:
            os.chdir(save)
| 31.133929 | 86 | 0.578721 |
import unittest
import os
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager, LazyEvaluate
from IPython.testing.globalipapp import get_ipython
from IPython.utils import py3compat
from IPython.utils.tempdir import TemporaryDirectory
ip = get_ipython()
class PromptTests(unittest.TestCase):
def setUp(self):
self.pm = PromptManager(shell=ip, config=ip.config)
def test_multiline_prompt(self):
self.pm.in_template = "[In]\n>>>"
self.pm.render('in')
self.assertEqual(self.pm.width, 3)
self.assertEqual(self.pm.txtwidth, 3)
self.pm.in_template = '[In]\n'
self.pm.render('in')
self.assertEqual(self.pm.width, 0)
self.assertEqual(self.pm.txtwidth, 0)
def test_translate_abbreviations(self):
def do_translate(template):
self.pm.in_template = template
return self.pm.templates['in']
pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
(r'\T', '{time}'),
(r'\n', '\n')
]
tt.check_pairs(do_translate, pairs)
def test_user_ns(self):
self.pm.color_scheme = 'NoColor'
ip.ex("foo='bar'")
self.pm.in_template = "In [{foo}]"
prompt = self.pm.render('in')
self.assertEqual(prompt, u'In [bar]')
def test_builtins(self):
self.pm.color_scheme = 'NoColor'
self.pm.in_template = "In [{int}]"
prompt = self.pm.render('in')
self.assertEqual(prompt, u"In [%r]" % int)
def test_undefined(self):
self.pm.color_scheme = 'NoColor'
self.pm.in_template = "In [{foo_dne}]"
prompt = self.pm.render('in')
self.assertEqual(prompt, u"In [<ERROR: 'foo_dne' not found>]")
def test_render(self):
self.pm.in_template = r'\#>'
self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
def test_render_unicode_cwd(self):
save = os.getcwdu()
with TemporaryDirectory(u'ünicødé') as td:
os.chdir(td)
self.pm.in_template = r'\w [\#]'
p = self.pm.render('in', color=False)
self.assertEqual(p, u"%s [%i]" % (os.getcwdu(), ip.execution_count))
os.chdir(save)
def test_lazy_eval_unicode(self):
u = u'ünicødé'
lz = LazyEvaluate(lambda : u)
self.assertEqual(unicode(lz), u)
self.assertEqual(format(lz), u)
def test_lazy_eval_nonascii_bytes(self):
u = u'ünicødé'
b = u.encode('utf8')
lz = LazyEvaluate(lambda : b)
self.assertEqual(str(lz), str(b))
self.assertEqual(format(lz), str(b))
def test_lazy_eval_float(self):
f = 0.503
lz = LazyEvaluate(lambda : f)
self.assertEqual(str(lz), str(f))
self.assertEqual(unicode(lz), unicode(f))
self.assertEqual(format(lz), str(f))
self.assertEqual(format(lz, '.1'), '0.5')
@dec.skip_win32
def test_cwd_x(self):
self.pm.in_template = r"\X0"
save = os.getcwdu()
os.chdir(os.path.expanduser('~'))
p = self.pm.render('in', color=False)
try:
self.assertEqual(p, '~')
finally:
os.chdir(save)
| true | true |
f72fed7319c1d66dcc65177c208b1a6671806efd | 4,361 | py | Python | var/spack/repos/builtin/packages/ginkgo/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2020-09-02T11:55:57.000Z | 2020-09-02T11:55:57.000Z | var/spack/repos/builtin/packages/ginkgo/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/ginkgo/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2020-01-10T18:54:54.000Z | 2021-07-03T22:57:16.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import sys
class Ginkgo(CMakePackage, CudaPackage):
    """High-performance linear algebra library for manycore systems,
    with a focus on sparse solution of linear systems."""

    homepage = "https://ginkgo-project.github.io/"
    git = "https://github.com/ginkgo-project/ginkgo.git"

    maintainers = ['tcojean', 'hartwiganzt']

    version('develop', branch='develop')
    version('master', branch='master')
    version('1.3.0', commit='4678668c66f634169def81620a85c9a20b7cec78')  # v1.3.0
    version('1.2.0', commit='b4be2be961fd5db45c3d02b5e004d73550722e31')  # v1.2.0
    version('1.1.1', commit='08d2c5200d3c78015ac8a4fd488bafe1e4240cf5')  # v1.1.1
    version('1.1.0', commit='b9bec8225442b3eb2a85a870efa112ab767a17fb')  # v1.1.0
    version('1.0.0', commit='45244641e0c2b19ba33aecd25153c0bddbcbe1a0')  # v1.0.0

    variant('shared', default=True, description='Build shared libraries')
    variant('full_optimizations', default=False, description='Compile with all optimizations')
    variant('openmp', default=sys.platform != 'darwin', description='Build with OpenMP')
    variant('develtools', default=False, description='Compile with develtools enabled')
    variant('build_type', default='Release',
            description='The build type to build',
            values=('Debug', 'Release'))
    variant('hip', default=False, description='Compile Ginkgo with HIP support')

    depends_on('cmake@3.9:', type='build')
    depends_on('cuda@9:', when='+cuda')

    depends_on('hip', when='+hip')
    depends_on('hipsparse', type="link", when='+hip')
    depends_on('hipblas', type="link", when='+hip')
    depends_on('rocrand', type="link", when='@develop+hip')
    depends_on('rocthrust', type="build", when='+hip')
    # Somehow, these dependencies not propagated by the HIP stack?
    depends_on('rocm-device-libs', type="link", when='+hip')
    depends_on('comgr', type="link", when='+hip')

    conflicts('%gcc@:5.2.9')
    conflicts("+hip", when="@:1.1.1")
    # The HIP packages from spack don't seem to work well with the CUDA
    # backend for now, so disable HIP with CUDA backend.
    conflicts("+cuda", when="+hip")

    def cmake_args(self):
        """Build the CMake configuration arguments.

        Raises:
            InstallError: if the compiler does not support the C++ standard
                required by the requested Ginkgo version (C++11 for
                <=1.2.0, C++14 otherwise).
        """
        # Check that the required C++ standard is available.
        if self.spec.satisfies('@:1.2.0'):
            try:
                self.compiler.cxx11_flag
            except UnsupportedCompilerFlag:
                # BUG FIX: the InstallError was previously constructed but
                # never raised, so the check silently did nothing.
                raise InstallError('Ginkgo requires a C++11-compliant C++ compiler')
        else:
            try:
                self.compiler.cxx14_flag
            except UnsupportedCompilerFlag:
                raise InstallError('Ginkgo requires a C++14-compliant C++ compiler')

        spec = self.spec
        args = [
            '-DGINKGO_BUILD_CUDA=%s' % ('ON' if '+cuda' in spec else 'OFF'),
            '-DGINKGO_BUILD_OMP=%s' % ('ON' if '+openmp' in spec else 'OFF'),
            '-DBUILD_SHARED_LIBS=%s' % ('ON' if '+shared' in spec else 'OFF'),
            '-DGINKGO_JACOBI_FULL_OPTIMIZATIONS=%s' % (
                'ON' if '+full_optimizations' in spec else 'OFF'),
            '-DGINKGO_DEVEL_TOOLS=%s' % (
                'ON' if '+develtools' in spec else 'OFF'),
            '-DGINKGO_BUILD_HIP=%s' % ('ON' if '+hip' in spec else 'OFF'),
            # As we are not exposing benchmarks, examples, tests nor doc
            # as part of the installation, disable building them altogether.
            '-DGINKGO_BUILD_BENCHMARKS=OFF',
            '-DGINKGO_BUILD_DOC=OFF',
            '-DGINKGO_BUILD_EXAMPLES=OFF',
            '-DGINKGO_BUILD_TESTS=OFF'
        ]
        if '+hip' in spec:
            # HIP's toolchain locations are not discovered automatically.
            args.append('-DHIP_PATH={0}'. format(spec['hip'].prefix))
            args.append('-DHIP_CLANG_PATH={0}/bin'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIP_CLANG_INCLUDE_PATH={0}/include'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIPSPARSE_PATH={0}'.
                        format(spec['hipsparse'].prefix))
            args.append('-DHIPBLAS_PATH={0}'.
                        format(spec['hipblas'].prefix))
        return args
| 45.427083 | 94 | 0.61706 |
from spack import *
import sys
class Ginkgo(CMakePackage, CudaPackage):
    """High-performance linear algebra library for manycore systems,
    with a focus on sparse solution of linear systems."""

    homepage = "https://ginkgo-project.github.io/"
    git = "https://github.com/ginkgo-project/ginkgo.git"
    maintainers = ['tcojean', 'hartwiganzt']
    version('develop', branch='develop')
    version('master', branch='master')
    version('1.3.0', commit='4678668c66f634169def81620a85c9a20b7cec78')
    version('1.2.0', commit='b4be2be961fd5db45c3d02b5e004d73550722e31')
    version('1.1.1', commit='08d2c5200d3c78015ac8a4fd488bafe1e4240cf5')
    version('1.1.0', commit='b9bec8225442b3eb2a85a870efa112ab767a17fb')
    version('1.0.0', commit='45244641e0c2b19ba33aecd25153c0bddbcbe1a0')
    variant('shared', default=True, description='Build shared libraries')
    variant('full_optimizations', default=False, description='Compile with all optimizations')
    variant('openmp', default=sys.platform != 'darwin', description='Build with OpenMP')
    variant('develtools', default=False, description='Compile with develtools enabled')
    variant('build_type', default='Release',
            description='The build type to build',
            values=('Debug', 'Release'))
    variant('hip', default=False, description='Compile Ginkgo with HIP support')
    depends_on('cmake@3.9:', type='build')
    depends_on('cuda@9:', when='+cuda')
    depends_on('hip', when='+hip')
    depends_on('hipsparse', type="link", when='+hip')
    depends_on('hipblas', type="link", when='+hip')
    depends_on('rocrand', type="link", when='@develop+hip')
    depends_on('rocthrust', type="build", when='+hip')
    depends_on('rocm-device-libs', type="link", when='+hip')
    depends_on('comgr', type="link", when='+hip')
    conflicts('%gcc@:5.2.9')
    conflicts("+hip", when="@:1.1.1")
    # The HIP backend does not combine with the CUDA backend.
    conflicts("+cuda", when="+hip")
    def cmake_args(self):
        """Build the CMake configuration arguments.

        Raises:
            InstallError: if the compiler does not support the C++ standard
                required by the requested Ginkgo version.
        """
        # Check that the required C++ standard is available.
        if self.spec.satisfies('@:1.2.0'):
            try:
                self.compiler.cxx11_flag
            except UnsupportedCompilerFlag:
                # BUG FIX: the InstallError was previously constructed but
                # never raised, so the check silently did nothing.
                raise InstallError('Ginkgo requires a C++11-compliant C++ compiler')
        else:
            try:
                self.compiler.cxx14_flag
            except UnsupportedCompilerFlag:
                raise InstallError('Ginkgo requires a C++14-compliant C++ compiler')
        spec = self.spec
        args = [
            '-DGINKGO_BUILD_CUDA=%s' % ('ON' if '+cuda' in spec else 'OFF'),
            '-DGINKGO_BUILD_OMP=%s' % ('ON' if '+openmp' in spec else 'OFF'),
            '-DBUILD_SHARED_LIBS=%s' % ('ON' if '+shared' in spec else 'OFF'),
            '-DGINKGO_JACOBI_FULL_OPTIMIZATIONS=%s' % (
                'ON' if '+full_optimizations' in spec else 'OFF'),
            '-DGINKGO_DEVEL_TOOLS=%s' % (
                'ON' if '+develtools' in spec else 'OFF'),
            '-DGINKGO_BUILD_HIP=%s' % ('ON' if '+hip' in spec else 'OFF'),
            # Benchmarks, examples, tests and docs are not installed.
            '-DGINKGO_BUILD_BENCHMARKS=OFF',
            '-DGINKGO_BUILD_DOC=OFF',
            '-DGINKGO_BUILD_EXAMPLES=OFF',
            '-DGINKGO_BUILD_TESTS=OFF'
        ]
        if '+hip' in spec:
            args.append('-DHIP_PATH={0}'. format(spec['hip'].prefix))
            args.append('-DHIP_CLANG_PATH={0}/bin'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIP_CLANG_INCLUDE_PATH={0}/include'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIPSPARSE_PATH={0}'.
                        format(spec['hipsparse'].prefix))
            args.append('-DHIPBLAS_PATH={0}'.
                        format(spec['hipblas'].prefix))
        return args
| true | true |
f72fedd3534283eb11dfd4a84eada7c236ead59a | 10,438 | py | Python | src/quart/wrappers/request.py | MarkoShiva/quart | f6709c6082a3cab9dffdcd937122f4d65a5990f7 | [
"MIT"
] | null | null | null | src/quart/wrappers/request.py | MarkoShiva/quart | f6709c6082a3cab9dffdcd937122f4d65a5990f7 | [
"MIT"
] | null | null | null | src/quart/wrappers/request.py | MarkoShiva/quart | f6709c6082a3cab9dffdcd937122f4d65a5990f7 | [
"MIT"
] | null | null | null | from __future__ import annotations
import asyncio
import io
from cgi import FieldStorage, parse_header
from typing import Any, AnyStr, Awaitable, Callable, Generator, Optional
from urllib.parse import parse_qs
from werkzeug.datastructures import CombinedMultiDict, Headers, MultiDict
from .base import BaseRequestWebsocket, JSONMixin
from ..datastructures import FileStorage
# Request headers replayed onto an HTTP/2 server-push promise
# (see Request.send_push_promise below).
SERVER_PUSH_HEADERS_TO_COPY = {
    "accept",
    "accept-encoding",
    "accept-language",
    "cache-control",
    "user-agent",
}
class Body:
    """A request body container.
    The request body can either be iterated over and consumed in parts
    (without building up memory usage) or awaited.
    .. code-block:: python
        async for data in body:
            ...
        # or simply
        complete = await body
    Note: It is not possible to iterate over the data and then await
    it.
    """

    def __init__(
        self, expected_content_length: Optional[int], max_content_length: Optional[int]
    ) -> None:
        self._data = bytearray()
        self._complete: asyncio.Event = asyncio.Event()
        self._has_data: asyncio.Event = asyncio.Event()
        self._max_content_length = max_content_length
        # Exceptions must be raised within application (not ASGI)
        # calls, this is achieved by having the ASGI methods set this
        # to an exception on error.
        self._must_raise: Optional[Exception] = None
        # Reject early when the declared Content-Length already exceeds
        # the configured limit; the error is raised on first consumption.
        if (
            expected_content_length is not None
            and max_content_length is not None
            and expected_content_length > max_content_length
        ):
            from ..exceptions import RequestEntityTooLarge  # noqa Avoiding circular import
            self._must_raise = RequestEntityTooLarge()
    def __aiter__(self) -> "Body":
        return self
    async def __anext__(self) -> bytes:
        if self._must_raise is not None:
            raise self._must_raise
        # if we got all of the data in the first shot, then self._complete is
        # set and self._has_data will not get set again, so skip the await
        # if we already have completed everything
        if not self._complete.is_set():
            await self._has_data.wait()
        if self._complete.is_set() and len(self._data) == 0:
            raise StopAsyncIteration()
        # Hand out (and drop) everything buffered so far.
        data = bytes(self._data)
        self._data.clear()
        self._has_data.clear()
        return data
    def __await__(self) -> Generator[Any, None, Any]:
        # Must check the _must_raise before and after waiting on the
        # completion event as it may change whilst waiting and the
        # event may not be set if there is already an issue.
        if self._must_raise is not None:
            raise self._must_raise
        yield from self._complete.wait().__await__()
        if self._must_raise is not None:
            raise self._must_raise
        return bytes(self._data)
    def append(self, data: bytes) -> None:
        # Called from the ASGI receive side; ignore empty chunks and stop
        # buffering once an error has been recorded.
        if data == b"" or self._must_raise is not None:
            return
        self._data.extend(data)
        self._has_data.set()
        # Record (not raise) the over-limit error; the consumer raises it.
        if self._max_content_length is not None and len(self._data) > self._max_content_length:
            from ..exceptions import RequestEntityTooLarge  # noqa Avoiding circular import
            self._must_raise = RequestEntityTooLarge()
            self.set_complete()
    def set_complete(self) -> None:
        # Wake both waiters: awaiting callers and async iterators.
        self._complete.set()
        self._has_data.set()
    def set_result(self, data: bytes) -> None:
        """Convenience method, mainly for testing."""
        self.append(data)
        self.set_complete()
class Request(BaseRequestWebsocket, JSONMixin):
    """This class represents a request.
    It can be subclassed and the subclassed used in preference by
    replacing the :attr:`~quart.Quart.request_class` with your
    subclass.
    Attributes:
        body_class: The class to store the body data within.
    """

    body_class = Body

    def __init__(
        self,
        method: str,
        scheme: str,
        path: str,
        query_string: bytes,
        headers: Headers,
        root_path: str,
        http_version: str,
        scope: dict,
        *,
        max_content_length: Optional[int] = None,
        body_timeout: Optional[int] = None,
        send_push_promise: Callable[[str, Headers], Awaitable[None]],
    ) -> None:
        """Create a request object.
        Arguments:
            method: The HTTP verb.
            scheme: The scheme used for the request.
            path: The full unquoted path of the request.
            query_string: The raw bytes for the query string part.
            headers: The request headers.
            root_path: The root path that should be prepended to all
                routes.
            http_version: The HTTP version of the request.
            max_content_length: The maximum length in bytes of the
                body (None implies no limit in Quart).
            body_timeout: The maximum time (seconds) to wait for the
                body before timing out.
            send_push_promise: An awaitable to send a push promise based
                off of this request (HTTP/2 feature).
            scope: Underlying ASGI scope dictionary.
        """
        super().__init__(
            method, scheme, path, query_string, headers, root_path, http_version, scope
        )
        self.body_timeout = body_timeout
        self.body = self.body_class(self.content_length, max_content_length)
        # Form/file caches; populated lazily by _load_form_data.
        self._form: Optional[MultiDict] = None
        self._files: Optional[MultiDict] = None
        self._send_push_promise = send_push_promise
    async def get_data(self, raw: bool = True) -> AnyStr:
        """The request body data, as bytes (raw) or decoded text."""
        try:
            body_future = asyncio.ensure_future(self.body)
            raw_data = await asyncio.wait_for(body_future, timeout=self.body_timeout)
        except asyncio.TimeoutError:
            # Cancel and drain the future before raising, to avoid a
            # pending-task warning.
            body_future.cancel()
            try:
                await body_future
            except asyncio.CancelledError:
                pass
            from ..exceptions import RequestTimeout  # noqa Avoiding circular import
            raise RequestTimeout()
        if raw:
            return raw_data
        else:
            return raw_data.decode(self.charset)
    @property
    async def data(self) -> bytes:
        return await self.get_data()
    @property
    async def values(self) -> CombinedMultiDict:
        # Query-string args and form fields combined (args take priority).
        form = await self.form
        return CombinedMultiDict([self.args, form])
    @property
    async def form(self) -> MultiDict:
        """The parsed form encoded data.
        Note file data is present in the :attr:`files`.
        """
        if self._form is None:
            await self._load_form_data()
        return self._form
    @property
    async def files(self) -> MultiDict:
        """The parsed files.
        This will return an empty multidict unless the request
        mimetype was ``enctype="multipart/form-data"`` and the method
        POST, PUT, or PATCH.
        """
        if self._files is None:
            await self._load_form_data()
        return self._files
    async def _load_form_data(self) -> None:
        """Parse the body into self._form / self._files (urlencoded or multipart)."""
        # NOTE(review): uses the stdlib ``cgi`` module (parse_header /
        # FieldStorage), which is deprecated in Python 3.11+ — confirm the
        # supported Python range for this codebase.
        raw_data: bytes = await self.get_data(raw=True)
        self._form = MultiDict()
        self._files = MultiDict()
        content_header = self.content_type
        if content_header is None:
            return
        content_type, parameters = parse_header(content_header)
        if content_type == "application/x-www-form-urlencoded":
            try:
                data = raw_data.decode(parameters.get("charset", "utf-8"))
            except UnicodeDecodeError:
                from ..exceptions import BadRequest  # noqa Avoiding circular import
                raise BadRequest()
            for key, values in parse_qs(data, keep_blank_values=True).items():
                for value in values:
                    self._form.add(key, value)
        elif content_type == "multipart/form-data":
            field_storage = FieldStorage(
                io.BytesIO(raw_data),
                headers={name.lower(): value for name, value in self.headers.items()},
                environ={"REQUEST_METHOD": "POST"},
                limit=len(raw_data),
            )
            for key in field_storage:
                field_storage_key = field_storage[key]
                # Repeated field names arrive as a list of entries.
                if isinstance(field_storage_key, list):
                    for item in field_storage_key:
                        self._load_field_storage(key, item)
                else:
                    self._load_field_storage(key, field_storage_key)
    def _load_field_storage(self, key: str, field_storage: FieldStorage) -> None:
        """Route one parsed multipart entry into files (uploads) or form (values)."""
        if isinstance(field_storage, FieldStorage) and field_storage.filename is not None:
            self._files.add(
                key,
                FileStorage(
                    io.BytesIO(field_storage.file.read()),
                    field_storage.filename,
                    field_storage.name,  # type: ignore
                    field_storage.type,
                    field_storage.headers,  # type: ignore
                ),
            )
        else:
            self._form.add(key, field_storage.value)
    @property
    def content_encoding(self) -> Optional[str]:
        return self.headers.get("Content-Encoding")
    @property
    def content_length(self) -> Optional[int]:
        if "Content-Length" in self.headers:
            return int(self.headers["Content-Length"])
        else:
            return None
    @property
    def content_md5(self) -> Optional[str]:
        return self.headers.get("Content-md5")
    @property
    def content_type(self) -> Optional[str]:
        return self.headers.get("Content-Type")
    async def _load_json_data(self) -> str:
        """Return the data after decoding."""
        return await self.get_data(raw=False)
    async def send_push_promise(self, path: str) -> None:
        # Copy the allow-listed headers onto the promised request.
        headers = Headers()
        for name in SERVER_PUSH_HEADERS_TO_COPY:
            for value in self.headers.getlist(name):
                headers.add(name, value)
        await self._send_push_promise(path, headers)
    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.method}, {self.path})"
| 34.111111 | 95 | 0.610653 | from __future__ import annotations
import asyncio
import io
from cgi import FieldStorage, parse_header
from typing import Any, AnyStr, Awaitable, Callable, Generator, Optional
from urllib.parse import parse_qs
from werkzeug.datastructures import CombinedMultiDict, Headers, MultiDict
from .base import BaseRequestWebsocket, JSONMixin
from ..datastructures import FileStorage
SERVER_PUSH_HEADERS_TO_COPY = {
"accept",
"accept-encoding",
"accept-language",
"cache-control",
"user-agent",
}
class Body:
def __init__(
self, expected_content_length: Optional[int], max_content_length: Optional[int]
) -> None:
self._data = bytearray()
self._complete: asyncio.Event = asyncio.Event()
self._has_data: asyncio.Event = asyncio.Event()
self._max_content_length = max_content_length
self._must_raise: Optional[Exception] = None
if (
expected_content_length is not None
and max_content_length is not None
and expected_content_length > max_content_length
):
from ..exceptions import RequestEntityTooLarge
self._must_raise = RequestEntityTooLarge()
def __aiter__(self) -> "Body":
return self
async def __anext__(self) -> bytes:
if self._must_raise is not None:
raise self._must_raise
if not self._complete.is_set():
await self._has_data.wait()
if self._complete.is_set() and len(self._data) == 0:
raise StopAsyncIteration()
data = bytes(self._data)
self._data.clear()
self._has_data.clear()
return data
def __await__(self) -> Generator[Any, None, Any]:
if self._must_raise is not None:
raise self._must_raise
yield from self._complete.wait().__await__()
if self._must_raise is not None:
raise self._must_raise
return bytes(self._data)
def append(self, data: bytes) -> None:
if data == b"" or self._must_raise is not None:
return
self._data.extend(data)
self._has_data.set()
if self._max_content_length is not None and len(self._data) > self._max_content_length:
from ..exceptions import RequestEntityTooLarge
self._must_raise = RequestEntityTooLarge()
self.set_complete()
def set_complete(self) -> None:
self._complete.set()
self._has_data.set()
def set_result(self, data: bytes) -> None:
self.append(data)
self.set_complete()
class Request(BaseRequestWebsocket, JSONMixin):
body_class = Body
def __init__(
self,
method: str,
scheme: str,
path: str,
query_string: bytes,
headers: Headers,
root_path: str,
http_version: str,
scope: dict,
*,
max_content_length: Optional[int] = None,
body_timeout: Optional[int] = None,
send_push_promise: Callable[[str, Headers], Awaitable[None]],
) -> None:
super().__init__(
method, scheme, path, query_string, headers, root_path, http_version, scope
)
self.body_timeout = body_timeout
self.body = self.body_class(self.content_length, max_content_length)
self._form: Optional[MultiDict] = None
self._files: Optional[MultiDict] = None
self._send_push_promise = send_push_promise
async def get_data(self, raw: bool = True) -> AnyStr:
try:
body_future = asyncio.ensure_future(self.body)
raw_data = await asyncio.wait_for(body_future, timeout=self.body_timeout)
except asyncio.TimeoutError:
body_future.cancel()
try:
await body_future
except asyncio.CancelledError:
pass
from ..exceptions import RequestTimeout
raise RequestTimeout()
if raw:
return raw_data
else:
return raw_data.decode(self.charset)
@property
async def data(self) -> bytes:
return await self.get_data()
@property
async def values(self) -> CombinedMultiDict:
form = await self.form
return CombinedMultiDict([self.args, form])
@property
async def form(self) -> MultiDict:
if self._form is None:
await self._load_form_data()
return self._form
@property
async def files(self) -> MultiDict:
if self._files is None:
await self._load_form_data()
return self._files
async def _load_form_data(self) -> None:
raw_data: bytes = await self.get_data(raw=True)
self._form = MultiDict()
self._files = MultiDict()
content_header = self.content_type
if content_header is None:
return
content_type, parameters = parse_header(content_header)
if content_type == "application/x-www-form-urlencoded":
try:
data = raw_data.decode(parameters.get("charset", "utf-8"))
except UnicodeDecodeError:
from ..exceptions import BadRequest
raise BadRequest()
for key, values in parse_qs(data, keep_blank_values=True).items():
for value in values:
self._form.add(key, value)
elif content_type == "multipart/form-data":
field_storage = FieldStorage(
io.BytesIO(raw_data),
headers={name.lower(): value for name, value in self.headers.items()},
environ={"REQUEST_METHOD": "POST"},
limit=len(raw_data),
)
for key in field_storage:
field_storage_key = field_storage[key]
if isinstance(field_storage_key, list):
for item in field_storage_key:
self._load_field_storage(key, item)
else:
self._load_field_storage(key, field_storage_key)
def _load_field_storage(self, key: str, field_storage: FieldStorage) -> None:
if isinstance(field_storage, FieldStorage) and field_storage.filename is not None:
self._files.add(
key,
FileStorage(
io.BytesIO(field_storage.file.read()),
field_storage.filename,
field_storage.name,
field_storage.type,
field_storage.headers,
),
)
else:
self._form.add(key, field_storage.value)
@property
def content_encoding(self) -> Optional[str]:
return self.headers.get("Content-Encoding")
@property
def content_length(self) -> Optional[int]:
if "Content-Length" in self.headers:
return int(self.headers["Content-Length"])
else:
return None
@property
def content_md5(self) -> Optional[str]:
return self.headers.get("Content-md5")
@property
def content_type(self) -> Optional[str]:
return self.headers.get("Content-Type")
async def _load_json_data(self) -> str:
return await self.get_data(raw=False)
async def send_push_promise(self, path: str) -> None:
headers = Headers()
for name in SERVER_PUSH_HEADERS_TO_COPY:
for value in self.headers.getlist(name):
headers.add(name, value)
await self._send_push_promise(path, headers)
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.method}, {self.path})"
| true | true |
f72fee28d1d7a6de068ec92b5dd4448e2007bd1e | 7,158 | py | Python | sdk/python/pulumi_azure_native/avs/v20210101preview/get_workload_network_dns_service.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/avs/v20210101preview/get_workload_network_dns_service.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/avs/v20210101preview/get_workload_network_dns_service.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetWorkloadNetworkDnsServiceResult',
'AwaitableGetWorkloadNetworkDnsServiceResult',
'get_workload_network_dns_service',
]
@pulumi.output_type
class GetWorkloadNetworkDnsServiceResult:
    """
    NSX DNS Service
    """
    # NOTE: generated by the Pulumi SDK Generator — keep edits in sync with
    # the generator template rather than hand-modifying the field pattern.

    # Generated constructor: validates each field's runtime type, then stores
    # it on the instance via pulumi.set.
    def __init__(__self__, default_dns_zone=None, display_name=None, dns_service_ip=None, fqdn_zones=None, id=None, log_level=None, name=None, provisioning_state=None, revision=None, status=None, type=None):
        if default_dns_zone and not isinstance(default_dns_zone, str):
            raise TypeError("Expected argument 'default_dns_zone' to be a str")
        pulumi.set(__self__, "default_dns_zone", default_dns_zone)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if dns_service_ip and not isinstance(dns_service_ip, str):
            raise TypeError("Expected argument 'dns_service_ip' to be a str")
        pulumi.set(__self__, "dns_service_ip", dns_service_ip)
        if fqdn_zones and not isinstance(fqdn_zones, list):
            raise TypeError("Expected argument 'fqdn_zones' to be a list")
        pulumi.set(__self__, "fqdn_zones", fqdn_zones)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if log_level and not isinstance(log_level, str):
            raise TypeError("Expected argument 'log_level' to be a str")
        pulumi.set(__self__, "log_level", log_level)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if revision and not isinstance(revision, float):
            raise TypeError("Expected argument 'revision' to be a float")
        pulumi.set(__self__, "revision", revision)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    # Each getter maps a snake_case Python attribute to its camelCase wire key.
    @property
    @pulumi.getter(name="defaultDnsZone")
    def default_dns_zone(self) -> Optional[str]:
        """
        Default DNS zone of the DNS Service.
        """
        return pulumi.get(self, "default_dns_zone")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        Display name of the DNS Service.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="dnsServiceIp")
    def dns_service_ip(self) -> Optional[str]:
        """
        DNS service IP of the DNS Service.
        """
        return pulumi.get(self, "dns_service_ip")

    @property
    @pulumi.getter(name="fqdnZones")
    def fqdn_zones(self) -> Optional[Sequence[str]]:
        """
        FQDN zones of the DNS Service.
        """
        return pulumi.get(self, "fqdn_zones")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="logLevel")
    def log_level(self) -> Optional[str]:
        """
        DNS Service log level.
        """
        return pulumi.get(self, "log_level")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The provisioning state
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter
    def revision(self) -> Optional[float]:
        """
        NSX revision number.
        """
        return pulumi.get(self, "revision")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        DNS Service status.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableGetWorkloadNetworkDnsServiceResult(GetWorkloadNetworkDnsServiceResult):
    """Awaitable wrapper so the invoke result works in both sync and async code."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this a generator function, which is
        # what `await` requires; awaiting resolves immediately to a plain
        # (non-awaitable) copy of the result.
        if False:
            yield self
        return GetWorkloadNetworkDnsServiceResult(
            default_dns_zone=self.default_dns_zone,
            display_name=self.display_name,
            dns_service_ip=self.dns_service_ip,
            fqdn_zones=self.fqdn_zones,
            id=self.id,
            log_level=self.log_level,
            name=self.name,
            provisioning_state=self.provisioning_state,
            revision=self.revision,
            status=self.status,
            type=self.type)
def get_workload_network_dns_service(dns_service_id: Optional[str] = None,
                                     private_cloud_name: Optional[str] = None,
                                     resource_group_name: Optional[str] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkloadNetworkDnsServiceResult:
    """
    Look up an NSX DNS Service in an Azure VMware Solution private cloud.

    :param str dns_service_id: NSX DNS Service identifier. Generally the same as the DNS Service's display name
    :param str private_cloud_name: Name of the private cloud
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param pulumi.InvokeOptions opts: Options controlling the invoke (version is defaulted below).
    :return: An awaitable result wrapping the DNS Service's properties.
    """
    __args__ = dict()
    __args__['dnsServiceId'] = dns_service_id
    __args__['privateCloudName'] = private_cloud_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous RPC into the Pulumi engine; typ= tells the runtime how to
    # deserialize the provider response.
    __ret__ = pulumi.runtime.invoke('azure-native:avs/v20210101preview:getWorkloadNetworkDnsService', __args__, opts=opts, typ=GetWorkloadNetworkDnsServiceResult).value

    return AwaitableGetWorkloadNetworkDnsServiceResult(
        default_dns_zone=__ret__.default_dns_zone,
        display_name=__ret__.display_name,
        dns_service_ip=__ret__.dns_service_ip,
        fqdn_zones=__ret__.fqdn_zones,
        id=__ret__.id,
        log_level=__ret__.log_level,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        revision=__ret__.revision,
        status=__ret__.status,
        type=__ret__.type)
| 35.969849 | 207 | 0.644035 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetWorkloadNetworkDnsServiceResult',
'AwaitableGetWorkloadNetworkDnsServiceResult',
'get_workload_network_dns_service',
]
@pulumi.output_type
class GetWorkloadNetworkDnsServiceResult:
    """NSX DNS Service lookup result (Pulumi-generated output type)."""

    # Generated constructor: validates each field's runtime type, then stores
    # it on the instance via pulumi.set.
    def __init__(__self__, default_dns_zone=None, display_name=None, dns_service_ip=None, fqdn_zones=None, id=None, log_level=None, name=None, provisioning_state=None, revision=None, status=None, type=None):
        if default_dns_zone and not isinstance(default_dns_zone, str):
            raise TypeError("Expected argument 'default_dns_zone' to be a str")
        pulumi.set(__self__, "default_dns_zone", default_dns_zone)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if dns_service_ip and not isinstance(dns_service_ip, str):
            raise TypeError("Expected argument 'dns_service_ip' to be a str")
        pulumi.set(__self__, "dns_service_ip", dns_service_ip)
        if fqdn_zones and not isinstance(fqdn_zones, list):
            raise TypeError("Expected argument 'fqdn_zones' to be a list")
        pulumi.set(__self__, "fqdn_zones", fqdn_zones)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if log_level and not isinstance(log_level, str):
            raise TypeError("Expected argument 'log_level' to be a str")
        pulumi.set(__self__, "log_level", log_level)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if revision and not isinstance(revision, float):
            raise TypeError("Expected argument 'revision' to be a float")
        pulumi.set(__self__, "revision", revision)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    # Each getter maps a snake_case Python attribute to its camelCase wire key.
    @property
    @pulumi.getter(name="defaultDnsZone")
    def default_dns_zone(self) -> Optional[str]:
        """Default DNS zone of the DNS Service."""
        return pulumi.get(self, "default_dns_zone")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """Display name of the DNS Service."""
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="dnsServiceIp")
    def dns_service_ip(self) -> Optional[str]:
        """DNS service IP of the DNS Service."""
        return pulumi.get(self, "dns_service_ip")

    @property
    @pulumi.getter(name="fqdnZones")
    def fqdn_zones(self) -> Optional[Sequence[str]]:
        """FQDN zones of the DNS Service."""
        return pulumi.get(self, "fqdn_zones")

    @property
    @pulumi.getter
    def id(self) -> str:
        """Resource ID."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="logLevel")
    def log_level(self) -> Optional[str]:
        """DNS Service log level."""
        return pulumi.get(self, "log_level")

    @property
    @pulumi.getter
    def name(self) -> str:
        """Resource name."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """The provisioning state."""
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter
    def revision(self) -> Optional[float]:
        """NSX revision number."""
        return pulumi.get(self, "revision")

    @property
    @pulumi.getter
    def status(self) -> str:
        """DNS Service status."""
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def type(self) -> str:
        """Resource type."""
        return pulumi.get(self, "type")
class AwaitableGetWorkloadNetworkDnsServiceResult(GetWorkloadNetworkDnsServiceResult):
    """Awaitable wrapper so the invoke result works in both sync and async code."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # Unreachable `yield` turns this into a generator (required by
        # `await`); awaiting resolves immediately to a plain result copy.
        if False:
            yield self
        return GetWorkloadNetworkDnsServiceResult(
            default_dns_zone=self.default_dns_zone,
            display_name=self.display_name,
            dns_service_ip=self.dns_service_ip,
            fqdn_zones=self.fqdn_zones,
            id=self.id,
            log_level=self.log_level,
            name=self.name,
            provisioning_state=self.provisioning_state,
            revision=self.revision,
            status=self.status,
            type=self.type)
def get_workload_network_dns_service(dns_service_id: Optional[str] = None,
                                     private_cloud_name: Optional[str] = None,
                                     resource_group_name: Optional[str] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkloadNetworkDnsServiceResult:
    """Look up an NSX DNS Service in an AVS private cloud.

    :param dns_service_id: NSX DNS Service identifier.
    :param private_cloud_name: Name of the private cloud.
    :param resource_group_name: Resource group name (case insensitive).
    :param opts: Invoke options; version is defaulted below when unset.
    """
    __args__ = dict()
    __args__['dnsServiceId'] = dns_service_id
    __args__['privateCloudName'] = private_cloud_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous RPC into the Pulumi engine; typ= drives deserialization.
    __ret__ = pulumi.runtime.invoke('azure-native:avs/v20210101preview:getWorkloadNetworkDnsService', __args__, opts=opts, typ=GetWorkloadNetworkDnsServiceResult).value

    return AwaitableGetWorkloadNetworkDnsServiceResult(
        default_dns_zone=__ret__.default_dns_zone,
        display_name=__ret__.display_name,
        dns_service_ip=__ret__.dns_service_ip,
        fqdn_zones=__ret__.fqdn_zones,
        id=__ret__.id,
        log_level=__ret__.log_level,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        revision=__ret__.revision,
        status=__ret__.status,
        type=__ret__.type)
| true | true |
f72fee595b4703f699cfe5d567dfaf697a1d6207 | 828 | py | Python | pysm/preprocessing/museum_crm/x01_make_karma_sources.py | binh-vu/semantic-modeling | b387584502ba1daa6abd6b7573828416f6426b49 | [
"MIT"
] | 3 | 2019-10-31T15:26:20.000Z | 2022-03-03T06:04:03.000Z | pysm/preprocessing/museum_crm/x01_make_karma_sources.py | binh-vu/semantic-modeling | b387584502ba1daa6abd6b7573828416f6426b49 | [
"MIT"
] | 1 | 2021-10-05T14:57:29.000Z | 2022-03-27T01:58:41.000Z | pysm/preprocessing/museum_crm/x01_make_karma_sources.py | binh-vu/semantic-modeling | b387584502ba1daa6abd6b7573828416f6426b49 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
import ujson
from pathlib import Path
from typing import Dict, Tuple, List, Set, Union, Optional, Any
from semantic_modeling.config import config
from semantic_modeling.data_io import get_data_tables, get_raw_data_tables, get_semantic_models, get_ontology, \
get_sampled_data_tables
from semantic_modeling.utilities.serializable import serializeJSON
from transformation.r2rml.commands.modeling import SetInternalLinkCmd, SetSemanticTypeCmd
from transformation.r2rml.r2rml import R2RML
dataset = "museum_crm"
ont = get_ontology(dataset)
source_dir = Path(config.datasets[dataset].as_path()) / "karma-version" / "sources"
source_dir.mkdir(exist_ok=True, parents=True)
for tbl in get_sampled_data_tables(dataset):
serializeJSON(tbl.rows, source_dir / f"{tbl.id}.json", indent=4) | 41.4 | 112 | 0.805556 |
import ujson
from pathlib import Path
from typing import Dict, Tuple, List, Set, Union, Optional, Any
from semantic_modeling.config import config
from semantic_modeling.data_io import get_data_tables, get_raw_data_tables, get_semantic_models, get_ontology, \
get_sampled_data_tables
from semantic_modeling.utilities.serializable import serializeJSON
from transformation.r2rml.commands.modeling import SetInternalLinkCmd, SetSemanticTypeCmd
from transformation.r2rml.r2rml import R2RML
dataset = "museum_crm"
ont = get_ontology(dataset)
source_dir = Path(config.datasets[dataset].as_path()) / "karma-version" / "sources"
source_dir.mkdir(exist_ok=True, parents=True)
for tbl in get_sampled_data_tables(dataset):
serializeJSON(tbl.rows, source_dir / f"{tbl.id}.json", indent=4) | true | true |
f72fef007e9ec6112672dfd0e87b7ec609049c6a | 2,115 | py | Python | scrape_artists/artists.py | flannerykj/python_scrape | c5166431810432c24e04150eb305b3ec2a899a91 | [
"MIT"
] | null | null | null | scrape_artists/artists.py | flannerykj/python_scrape | c5166431810432c24e04150eb305b3ec2a899a91 | [
"MIT"
] | null | null | null | scrape_artists/artists.py | flannerykj/python_scrape | c5166431810432c24e04150eb305b3ec2a899a91 | [
"MIT"
] | null | null | null |
import csv
import requests
import socket
from bs4 import BeautifulSoup
import re
import json
def parse_artists():
    """Scrape the Toronto street-art artist directory.

    Returns a list of profile dicts (one per artist link found). If the
    listing page cannot be fetched or parsed, returns whatever was
    collected before the failure.
    """
    artist_profiles = []
    try:
        url = 'http://wx.toronto.ca/inter/pmmd/streetart.nsf/artists?OpenView'
        response = requests.get(url)
        # Name the parser explicitly: deterministic across bs4 installs and
        # silences the "no parser specified" warning.
        soup = BeautifulSoup(response.content, 'html.parser')
        link_list = soup.findAll('a', attrs={'class': 'viewa1'})
        for item in link_list:
            item_url = 'http://wx.toronto.ca' + item.get('href')
            artist_profiles.append(get_profile_data(item_url))
    except Exception as e:
        # Python 3: Exception has no .message attribute — printing the
        # exception itself avoids a secondary AttributeError here.
        print(e)
    return artist_profiles
def get_profile_data(url):
    """Fetch one artist profile page and extract its contact/bio fields.

    Returns a dict of profile fields, or None if the page could not be
    fetched or parsed.
    """
    def _extract(pattern, text, strip_label):
        # Return the regex match with its label stripped, or '' when the
        # field is absent (the original unconditional .group() call raised
        # AttributeError on pages missing a field).
        match = re.search(pattern, text)
        if match is None:
            return ''
        return match.group().replace(strip_label, '')

    try:
        response = requests.get(url)
        soup = BeautifulSoup(response.content, 'html.parser')
        profile = soup.find('div', attrs={'id': 'profiledisplay'}).text
        name = soup.findAll('legend')[0].text
        email = _extract(r'[\w\.-]+@[\w\.-]+', profile, 'Business')
        website = _extract(r'Website: (.*?)[\n\r\s]+', profile, 'Website: ')
        bio = _extract(r'Profile\n(.*?)\n', profile, 'Profile')
        description = _extract(r'Business/Organization Description\n(.*?)\n',
                               profile, 'Business/Organization Description')
        experience = _extract(r'Experience\n(.*?)\n', profile, 'Experience')
        return {
            "name": name,
            "email": email,
            "website": website,
            "bio": bio,
            "description": description,
            "experience": experience,
            "dateJoined": "1508884475917",
            "dateUpdated": "1508884475917"
        }
    except Exception as e:
        # Python 3: Exception has no .message attribute.
        print(e)
        return None
# Runs at import time: scrape every profile and dump the list as JSON.
with open('artists.json', 'w') as outfile:
    json.dump(parse_artists(), outfile)
# The triple-quoted block below is dead code kept from an earlier CSV export
# (it references undefined names such as get_artist_urls and recipe_array).
'''artist_urls = get_artist_urls()
artist_array = compile_artist_profiles(artist_urls)
outfile = open("./toronto-artists.csv", "wb")
writer = csv.writer(outfile)
writer.writerows(recipe_array)'''
| 33.571429 | 144 | 0.605674 |
import csv
import requests
import socket
from bs4 import BeautifulSoup
import re
import json
def parse_artists():
    """Scrape the Toronto street-art artist directory into a list of dicts.

    Partial results gathered before any failure are still returned.
    """
    artist_profiles = []
    try:
        url = 'http://wx.toronto.ca/inter/pmmd/streetart.nsf/artists?OpenView'
        response = requests.get(url)
        # Explicit parser choice is deterministic across bs4 installs.
        soup = BeautifulSoup(response.content, 'html.parser')
        link_list = soup.findAll('a', attrs={'class': 'viewa1'})
        for item in link_list:
            item_url = 'http://wx.toronto.ca' + item.get('href')
            artist_profiles.append(get_profile_data(item_url))
    except Exception as e:
        # Python 3 exceptions have no .message attribute; print the exception.
        print(e)
    return artist_profiles
def get_profile_data(url):
    """Fetch one artist profile page and extract its fields.

    Returns a dict of profile fields, or None on fetch/parse failure.
    """
    def _extract(pattern, text, strip_label):
        # Missing fields yield '' instead of the AttributeError the original
        # unconditional .group() call produced.
        match = re.search(pattern, text)
        if match is None:
            return ''
        return match.group().replace(strip_label, '')

    try:
        response = requests.get(url)
        soup = BeautifulSoup(response.content, 'html.parser')
        profile = soup.find('div', attrs={'id': 'profiledisplay'}).text
        name = soup.findAll('legend')[0].text
        email = _extract(r'[\w\.-]+@[\w\.-]+', profile, 'Business')
        website = _extract(r'Website: (.*?)[\n\r\s]+', profile, 'Website: ')
        bio = _extract(r'Profile\n(.*?)\n', profile, 'Profile')
        description = _extract(r'Business/Organization Description\n(.*?)\n',
                               profile, 'Business/Organization Description')
        experience = _extract(r'Experience\n(.*?)\n', profile, 'Experience')
        return {
            "name": name,
            "email": email,
            "website": website,
            "bio": bio,
            "description": description,
            "experience": experience,
            "dateJoined": "1508884475917",
            "dateUpdated": "1508884475917"
        }
    except Exception as e:
        # Python 3 exceptions have no .message attribute.
        print(e)
        return None
return
# Runs at import time: scrape every profile and dump the list as JSON.
with open('artists.json', 'w') as outfile:
    json.dump(parse_artists(), outfile)
| true | true |
f72fef0e4ab230a89d2f0b6d56c75cd135c69cf4 | 497 | py | Python | puzzles/day21/puzzle1.py | sbr075/advent2021 | e431b56d9ee9ef9ef02fb9f9cde276feefb78095 | [
"MIT"
] | 1 | 2021-12-03T23:13:36.000Z | 2021-12-03T23:13:36.000Z | puzzles/day21/puzzle1.py | sbr075/advent2021 | e431b56d9ee9ef9ef02fb9f9cde276feefb78095 | [
"MIT"
] | null | null | null | puzzles/day21/puzzle1.py | sbr075/advent2021 | e431b56d9ee9ef9ef02fb9f9cde276feefb78095 | [
"MIT"
] | null | null | null | def read_input():
with open("input.txt", "r") as file:
return [int(p[28:]) for p in file.read().splitlines()]
mod = lambda i,j: ((i-1) % j) + 1
def main():
    # Dirac Dice, part 1: a deterministic d100 rolled three times per turn.
    pos = read_input()          # starting square (1..10) per player
    s = [0,0]                   # accumulated scores
    # i is the 1-based number of the first die roll of the current turn;
    # (i-1)%2 selects whose turn it is (player 0 rolls on turns 1, 3, ...).
    for i in range(1,1000,3):
        # Advance by the sum of three consecutive die faces (each wrapped
        # into 1..100), then wrap the board position into 1..10.
        pos[(i-1)%2] += sum([mod(j,100) for j in range(i,i+3)])
        pos[(i-1)%2] = mod(pos[(i-1)%2],10)
        s[(i-1)%2] += pos[(i-1)%2]
        if s[(i-1)%2] >= 1000: break
    # Answer: losing score times total rolls; i+2 is the index of the last
    # roll of the winning turn.
    print(f"Part 1 {min(s)*(i+2)}")
if __name__ == "__main__":
main() | 26.157895 | 63 | 0.478873 | def read_input():
with open("input.txt", "r") as file:
return [int(p[28:]) for p in file.read().splitlines()]
mod = lambda i,j: ((i-1) % j) + 1
def main():
    # Dirac Dice part 1 with a deterministic d100; see read_input for format.
    pos = read_input()          # starting squares (1..10)
    s = [0,0]                   # scores
    for i in range(1,1000,3):   # i = number of the turn's first die roll
        pos[(i-1)%2] += sum([mod(j,100) for j in range(i,i+3)])
        pos[(i-1)%2] = mod(pos[(i-1)%2],10)   # wrap onto the 10-square board
        s[(i-1)%2] += pos[(i-1)%2]
        if s[(i-1)%2] >= 1000: break
    # losing score * total number of rolls (i+2 = last roll of final turn)
    print(f"Part 1 {min(s)*(i+2)}")
if __name__ == "__main__":
main() | true | true |
f72fefc517a309b1ebb05a09c441a25eb97845f7 | 654 | py | Python | sort/insertion_sort.py | vasili-byl/algorithms | 4e37609ab9b724e140cfec4b01495a0952d28724 | [
"MIT"
] | 1 | 2020-05-02T13:40:10.000Z | 2020-05-02T13:40:10.000Z | sort/insertion_sort.py | vasili-byl/algorithms | 4e37609ab9b724e140cfec4b01495a0952d28724 | [
"MIT"
] | null | null | null | sort/insertion_sort.py | vasili-byl/algorithms | 4e37609ab9b724e140cfec4b01495a0952d28724 | [
"MIT"
] | null | null | null | from sort.abstract_sort import Sort
class InsertionSort(Sort):
    """In-place insertion sort over array[left_bound..right_bound] (inclusive)."""

    def __call__(self, array, left_bound=None, right_bound=None):
        """Sort the slice in place; bounds default to the whole array.

        Stable: equal elements keep their relative order.
        """
        lo = 0 if left_bound is None else left_bound
        hi = len(array) - 1 if right_bound is None else right_bound
        for i in range(lo + 1, hi + 1):
            key = array[i]
            # Shift every larger element one slot right, then drop the key
            # into the gap — same writes as a scan-then-shift formulation.
            j = i - 1
            while j >= lo and array[j] > key:
                array[j + 1] = array[j]
                j -= 1
            array[j + 1] = key
| 32.7 | 65 | 0.496942 | from sort.abstract_sort import Sort
class InsertionSort(Sort):
    """In-place insertion sort over array[left_bound..right_bound] (inclusive)."""

    def __call__(self, array, left_bound=None, right_bound=None):
        # Bounds default to the entire array.
        if left_bound is None:
            left_bound = 0
        if right_bound is None:
            right_bound = len(array) - 1
        for i in range(left_bound + 1, right_bound + 1):
            # Phase 1: scan right-to-left for the insertion point — the slot
            # just after the first element <= array[i] (stable for ties).
            pos = left_bound
            for j in range(i - 1, left_bound - 1, -1):
                if array[j] <= array[i]:
                    pos = j + 1
                    break
            # Phase 2: shift elements pos..i-1 one slot right, insert key.
            current = array[i]
            for j in range(i - 1, pos - 1, -1):
                array[j + 1] = array[j]
            array[pos] = current
| true | true |
f72ff070a885f440110d03df8a65db80bf61a2f3 | 4,299 | py | Python | rllib/utils/torch_ops.py | acmore/ray | 9f0f54266064e203b0bdcc9d3fa947cb4518ebc0 | [
"Apache-2.0"
] | null | null | null | rllib/utils/torch_ops.py | acmore/ray | 9f0f54266064e203b0bdcc9d3fa947cb4518ebc0 | [
"Apache-2.0"
] | 1 | 2020-06-23T07:54:44.000Z | 2020-06-23T08:04:47.000Z | rllib/utils/torch_ops.py | acmore/ray | 9f0f54266064e203b0bdcc9d3fa947cb4518ebc0 | [
"Apache-2.0"
] | null | null | null | import numpy as np
from ray.rllib.utils import try_import_tree
from ray.rllib.utils.framework import try_import_torch
torch, _ = try_import_torch()
tree = try_import_tree()
def explained_variance(y, pred):
    """Return the explained variance of `pred` w.r.t. targets `y`.

    EV = 1 - Var[y - pred] / Var[y], clamped below at -1.0. A value of 1.0
    means perfect prediction, 0.0 means no better than predicting the mean.

    The clamp tensor is created on `y`'s device: the previous code always
    moved it to "cuda" when CUDA was available, which raised a device
    mismatch for CPU inputs on CUDA-capable machines.
    """
    y_var = torch.var(y, dim=[0])
    diff_var = torch.var(y - pred, dim=[0])
    min_ = torch.tensor([-1.0], device=y.device)
    return torch.max(min_, 1 - (diff_var / y_var))
def global_norm(tensors):
    """Return the global L2 norm over a list of tensors.

    Equivalent to sqrt(sum(t ** 2 for every element of every tensor)).

    Args:
        tensors (List[torch.Tensor]): Tensors to reduce over.
    """
    # Per-tensor L2 norms: sqrt(sum(x^2)) over each tensor's elements.
    per_tensor_l2 = [
        torch.pow(torch.sum(torch.pow(t, 2.0)), 0.5) for t in tensors
    ]
    # Combine: sqrt of the sum of squared per-tensor norms.
    return torch.pow(sum(torch.pow(norm, 2.0) for norm in per_tensor_l2), 0.5)
def huber_loss(x, delta=1.0):
    """Element-wise Huber loss: quadratic within ±delta, linear outside.

    Reference: https://en.wikipedia.org/wiki/Huber_loss
    """
    abs_x = torch.abs(x)
    quadratic = torch.pow(x, 2.0) * 0.5
    linear = delta * (abs_x - 0.5 * delta)
    return torch.where(abs_x < delta, quadratic, linear)
def l2_loss(x):
    """Return half the squared L2 norm of `x` (no square root).

    output = sum(x ** 2) / 2
    """
    squared_sum = torch.sum(torch.pow(x, 2.0))
    return squared_sum / 2.0
def reduce_mean_ignore_inf(x, axis):
    """Mean of `x` along `axis`, treating -inf entries as absent."""
    finite_mask = torch.ne(x, float("-inf"))
    # Zero out -inf slots so they contribute nothing to the numerator,
    # and divide by the count of finite entries instead of the full size.
    zeroed = torch.where(finite_mask, x, torch.zeros_like(x))
    return torch.sum(zeroed, axis) / torch.sum(finite_mask.float(), axis)
def minimize_and_clip(optimizer, clip_val=10):
    """Clip each parameter's gradient norm in `optimizer.param_groups`.

    Ensures the norm of the gradient of each variable is at most `clip_val`.

    Fix: `clip_grad_norm_` must receive the *parameters* (it reads their
    `.grad` attributes). The previous code passed `p.grad`, whose own
    `.grad` is None, so no clipping ever happened.
    """
    for param_group in optimizer.param_groups:
        for p in param_group["params"]:
            if p.grad is not None:
                torch.nn.utils.clip_grad_norm_([p], clip_val)
def sequence_mask(lengths, maxlen=None, dtype=None):
    """Offers same behavior as tf.sequence_mask for torch.

    Returns a (len(lengths), maxlen) mask where row i has `lengths[i]`
    leading True entries, cast to `dtype` (bool by default).

    Fix: `Tensor.type()` returns a new tensor rather than converting in
    place, so the previous `mask.type(...)` result was discarded and the
    `dtype` argument was silently ignored.

    Thanks to Dimitris Papatheodorou
    (https://discuss.pytorch.org/t/pytorch-equivalent-for-tf-sequence-mask/
    39036).
    """
    if maxlen is None:
        maxlen = lengths.max()
    mask = ~(torch.ones((len(lengths), maxlen)).to(
        lengths.device).cumsum(dim=1).t() > lengths).t()
    return mask.type(dtype or torch.bool)
def convert_to_non_torch_type(stats):
    """Convert all torch Tensors in `stats` to numpy arrays / python scalars.

    Args:
        stats (any): Any (possibly nested) struct; torch Tensor leaves are
            converted, all other values pass through unchanged.

    Returns:
        Any: A new struct with the same structure as `stats` and no torch
            Tensors: 0-d tensors become python scalars, others numpy arrays.
    """
    def _numpyize(item):
        if not isinstance(item, torch.Tensor):
            return item
        # 0-d tensors become plain python scalars; everything else is
        # detached, moved to CPU, and exposed as a numpy array.
        if len(item.size()) == 0:
            return item.cpu().item()
        return item.cpu().detach().numpy()

    return tree.map_structure(_numpyize, stats)
def convert_to_torch_tensor(stats, device=None):
    """Converts any struct to torch.Tensors.

    Args:
        stats (any): Any (possibly nested) struct, the values in which will
            be converted and returned as a new struct with all leaves
            converted to torch tensors.
        device (Optional[torch.device]): When given, every resulting tensor
            is moved to this device.

    Returns:
        Any: A new struct with the same structure as `stats`, but with all
            values converted to torch Tensor types.
    """

    def mapping(item):
        # Existing tensors are reused (only moved to `device` if requested).
        if torch.is_tensor(item):
            return item if device is None else item.to(device)
        tensor = torch.from_numpy(np.asarray(item))
        # Floatify all float64 tensors.
        if tensor.dtype == torch.double:
            tensor = tensor.float()
        return tensor if device is None else tensor.to(device)

    return tree.map_structure(mapping, stats)
def atanh(x):
    """Element-wise inverse hyperbolic tangent; `x` must lie in (-1, 1).

    Uses log1p for better float accuracy near 0 than the direct
    0.5 * log((1 + x) / (1 - x)) formulation; mathematically identical.
    """
    return 0.5 * (torch.log1p(x) - torch.log1p(-x))
| 30.928058 | 78 | 0.640381 | import numpy as np
from ray.rllib.utils import try_import_tree
from ray.rllib.utils.framework import try_import_torch
torch, _ = try_import_torch()
tree = try_import_tree()
def explained_variance(y, pred):
    """EV = 1 - Var[y - pred] / Var[y], clamped below at -1.0.

    The clamp tensor is created on `y`'s device; the old code forced it to
    "cuda" whenever CUDA was available, breaking CPU-tensor calls on
    CUDA-capable machines with a device-mismatch error.
    """
    y_var = torch.var(y, dim=[0])
    diff_var = torch.var(y - pred, dim=[0])
    min_ = torch.tensor([-1.0], device=y.device)
    return torch.max(min_, 1 - (diff_var / y_var))
def global_norm(tensors):
    """Global L2 norm over a list of tensors: sqrt(sum of all squared elements)."""
    # Per-tensor L2 norms: sqrt(sum(x^2)) over each tensor's elements.
    single_l2s = [
        torch.pow(torch.sum(torch.pow(t, 2.0)), 0.5) for t in tensors
    ]
    # Compute global norm from all single tensors' L2 norms.
    return torch.pow(sum(torch.pow(l2, 2.0) for l2 in single_l2s), 0.5)
def huber_loss(x, delta=1.0):
    """Element-wise Huber loss: quadratic within ±delta, linear outside."""
    return torch.where(
        torch.abs(x) < delta,
        torch.pow(x, 2.0) * 0.5, delta * (torch.abs(x) - 0.5 * delta))
def l2_loss(x):
    """Half the squared L2 norm of `x` (no sqrt): sum(x ** 2) / 2."""
    return torch.sum(torch.pow(x, 2.0)) / 2.0
def reduce_mean_ignore_inf(x, axis):
    """Mean along `axis`, excluding -inf entries from both sum and count."""
    mask = torch.ne(x, float("-inf"))
    x_zeroed = torch.where(mask, x, torch.zeros_like(x))
    return torch.sum(x_zeroed, axis) / torch.sum(mask.float(), axis)
def minimize_and_clip(optimizer, clip_val=10):
    """Clip each parameter's gradient norm to at most `clip_val`.

    Fix: `clip_grad_norm_` reads the `.grad` of the tensors it is given, so
    it must receive the parameter `p`, not `p.grad` (whose `.grad` is None —
    the old call was a silent no-op).
    """
    for param_group in optimizer.param_groups:
        for p in param_group["params"]:
            if p.grad is not None:
                torch.nn.utils.clip_grad_norm_([p], clip_val)
def sequence_mask(lengths, maxlen=None, dtype=None):
    """tf.sequence_mask equivalent: row i has `lengths[i]` leading True slots.

    Fix: `Tensor.type()` returns a new tensor (it is not in-place), so the
    previous discarded `mask.type(...)` call meant `dtype` was ignored.
    """
    if maxlen is None:
        maxlen = lengths.max()
    mask = ~(torch.ones((len(lengths), maxlen)).to(
        lengths.device).cumsum(dim=1).t() > lengths).t()
    return mask.type(dtype or torch.bool)
def convert_to_non_torch_type(stats):
    """Replace every torch Tensor leaf in `stats` with numpy/python values."""
    def mapping(item):
        if isinstance(item, torch.Tensor):
            # 0-d tensors become python scalars; others numpy arrays on CPU.
            return item.cpu().item() if len(item.size()) == 0 else \
                item.cpu().detach().numpy()
        else:
            return item

    return tree.map_structure(mapping, stats)
def convert_to_torch_tensor(stats, device=None):
    """Convert every leaf in `stats` to a torch Tensor (optionally on `device`)."""
    def mapping(item):
        # Existing tensors are reused, only moved if a device is requested.
        if torch.is_tensor(item):
            return item if device is None else item.to(device)
        tensor = torch.from_numpy(np.asarray(item))
        # float64 inputs are downcast to float32.
        if tensor.dtype == torch.double:
            tensor = tensor.float()
        return tensor if device is None else tensor.to(device)

    return tree.map_structure(mapping, stats)
def atanh(x):
    """Element-wise atanh for x in (-1, 1), via log1p for accuracy near 0."""
    return 0.5 * (torch.log1p(x) - torch.log1p(-x))
| true | true |
f72ff1c4d7592842535f6a31fa135b7e0705f968 | 1,651 | py | Python | spotify_tracker/watcher_client.py | eriktaubeneck/spotifytracker | c0f7f1a418aae9184cb1d2d27835495f261027ce | [
"MIT"
] | null | null | null | spotify_tracker/watcher_client.py | eriktaubeneck/spotifytracker | c0f7f1a418aae9184cb1d2d27835495f261027ce | [
"MIT"
] | null | null | null | spotify_tracker/watcher_client.py | eriktaubeneck/spotifytracker | c0f7f1a418aae9184cb1d2d27835495f261027ce | [
"MIT"
] | null | null | null | import time
import logging
from .spotify_client import SpotifyPlaylistClient
from . import config
logger = logging.getLogger(name='spotify_tracker')
class SpotifyWatcherClient(SpotifyPlaylistClient):
    """Polls the user's current Spotify track and logs it to a playlist."""

    def __init__(self):
        # playlist_id: destination playlist for the listening history.
        self.playlist_id = config.get_config_value('watcher_playlist_id')
        # last_track_id: de-duplicates consecutive polls of the same track.
        self.last_track_id = None
        return super().__init__()

    def setup_playlist_id(self):
        """Interactively pick a history playlist and persist its id."""
        print("You need to add a playlist_id to your config to save "
              "song history to.")
        sp_playlists = self.sp.user_playlists(self.username)
        # Only offer playlists the user actually owns.
        playlists = [p for p in sp_playlists['items']
                     if p['owner']['id'] == self.username]
        for playlist in playlists:
            print('{}: {}'.format(playlist['name'], playlist['id']))
        playlist_id = input("Please input the playlist_id of the Playlist "
                            "you'd like to save your history to: ")
        config.save_config_value('watcher_playlist_id', playlist_id)

    def main(self):
        """One poll: append the current track unless it was just recorded."""
        track_id = self.get_current_track_id()
        if not track_id or track_id == self.last_track_id:
            return
        logger.info('Currently listening to {}'.format(
            self.get_track_name_and_artist_string(track_id)
        ))
        self.add_track_to_playlist(track_id)
        self.last_track_id = track_id

    def watch(self):
        """Poll forever, every 5 seconds.

        NOTE(review): safe_main is presumably an error-tolerant wrapper
        around main() defined on the parent class — confirm.
        """
        if not self.check_config():
            raise Exception("Please run setupwatcher command.")
        logger.debug('Starting watch loop')
        while True:
            logger.debug('New watch lap completed.')
            self.safe_main()
            time.sleep(5)
| 34.395833 | 75 | 0.634161 | import time
import logging
from .spotify_client import SpotifyPlaylistClient
from . import config
logger = logging.getLogger(name='spotify_tracker')
class SpotifyWatcherClient(SpotifyPlaylistClient):
    """Polls the current Spotify track and appends it to a history playlist."""

    def __init__(self):
        self.playlist_id = config.get_config_value('watcher_playlist_id')
        # Tracks the last song recorded so repeats aren't re-added each poll.
        self.last_track_id = None
        return super().__init__()

    def setup_playlist_id(self):
        # Interactive one-time setup: list owned playlists, save the choice.
        print("You need to add a playlist_id to your config to save "
              "song history to.")
        sp_playlists = self.sp.user_playlists(self.username)
        playlists = [p for p in sp_playlists['items']
                     if p['owner']['id'] == self.username]
        for playlist in playlists:
            print('{}: {}'.format(playlist['name'], playlist['id']))
        playlist_id = input("Please input the playlist_id of the Playlist "
                            "you'd like to save your history to: ")
        config.save_config_value('watcher_playlist_id', playlist_id)

    def main(self):
        # One poll cycle; skips when nothing is playing or track unchanged.
        track_id = self.get_current_track_id()
        if not track_id or track_id == self.last_track_id:
            return
        logger.info('Currently listening to {}'.format(
            self.get_track_name_and_artist_string(track_id)
        ))
        self.add_track_to_playlist(track_id)
        self.last_track_id = track_id

    def watch(self):
        # Endless 5-second poll loop; requires completed config.
        if not self.check_config():
            raise Exception("Please run setupwatcher command.")
        logger.debug('Starting watch loop')
        while True:
            logger.debug('New watch lap completed.')
            self.safe_main()
            time.sleep(5)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.