hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7fdd9a08c27c767cdf64427dfbfed5aeb57825d | 1,992 | py | Python | ooobuild/lo/animations/x_animate_color.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/animations/x_animate_color.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/animations/x_animate_color.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.animations
from abc import abstractproperty
from .x_animate import XAnimate as XAnimate_ca680c52
class XAnimateColor(XAnimate_ca680c52):
"""
Interface for animation by defining color changes over time.
Only color value will be legal values for the following members
See Also:
`API XAnimateColor <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1animations_1_1XAnimateColor.html>`_
"""
__ooo_ns__: str = 'com.sun.star.animations'
__ooo_full_ns__: str = 'com.sun.star.animations.XAnimateColor'
__ooo_type_name__: str = 'interface'
__pyunointerface__: str = 'com.sun.star.animations.XAnimateColor'
@abstractproperty
def ColorInterpolation(self) -> int:
"""
defines the color space which is used to perform the interpolation.
"""
@abstractproperty
def Direction(self) -> bool:
"""
defines the direction which is used to perform the interpolation inside the color space defined with ColorInterpolation.
Values could be TRUE for clockwise and FALSE for counterclockwise.
This attribute will be ignored for color spaces where this does not make any sense.
"""
__all__ = ['XAnimateColor']
| 34.947368 | 135 | 0.725402 |
from abc import abstractproperty
from .x_animate import XAnimate as XAnimate_ca680c52
class XAnimateColor(XAnimate_ca680c52):
__ooo_ns__: str = 'com.sun.star.animations'
__ooo_full_ns__: str = 'com.sun.star.animations.XAnimateColor'
__ooo_type_name__: str = 'interface'
__pyunointerface__: str = 'com.sun.star.animations.XAnimateColor'
@abstractproperty
def ColorInterpolation(self) -> int:
@abstractproperty
def Direction(self) -> bool:
__all__ = ['XAnimateColor']
| true | true |
f7fdda0ce6d96a5cb012c54ea8d48ad17684fa3b | 3,356 | py | Python | src/assignments/assignment3.py | acc-cosc-1336/cosc-1336-spring-2018-EricScotty | 80c0249a583dc178cfc7bb95b851d7f3240dc3e9 | [
"MIT"
] | null | null | null | src/assignments/assignment3.py | acc-cosc-1336/cosc-1336-spring-2018-EricScotty | 80c0249a583dc178cfc7bb95b851d7f3240dc3e9 | [
"MIT"
] | null | null | null | src/assignments/assignment3.py | acc-cosc-1336/cosc-1336-spring-2018-EricScotty | 80c0249a583dc178cfc7bb95b851d7f3240dc3e9 | [
"MIT"
] | null | null | null |
def decimal_to_binary(number):
'''
YOU MUST USE A WHILE LOOP
Given a number return its binary value in 8 bits
:param number: A whole number from 0-255
:return: a byte (8 bits) binary
TO GET CREDIT, YOU MUST WRITE CODE FOR FOLLOWING PSEUDOCODE Algorithm
Create a binary variable set it to ''
Set power variable to 0 and Iterate from 7-0 (use for or while loop)
in each iteration create/assign a value variable and assign it the result of 2 to the power variable
(example first loop is 2 to the power of 7 thus value is 128)
When number greater or equal to value append a '1' to the binary value and subtract value from number
otherwise append a '0' to the binary value
return binary value
WRITE YOUR CODE AFTER THE THREE QUOTES BELOW
'''
binary = ''
for int in range (7,-1,-1):
value = 2 ** int
if value <= number:
binary += '1'
number = number - value
else:
binary+='0'
return binary
def sum_square_of_number(number):
'''
USE A FOR LOOP
Given a number return the sum of all squared number from 1 to the number
Example given number 3 returns 14.
Number Square
1 1
2 4
3 9
SUM: 14<-- return this value
:param number:
:return: the sum of all squares from 1 to the number
WRITE YOUR CODE AFTER THE THREE QUOTES BELOW
'''
sum_of_squares = 0
for num in range (1, number+1):
square = num ** 2
sum_of_squares = sum_of_squares + square
return sum_of_squares
def is_prime(n):
'''
USE A FOR LOOP
Given a number return true if prime or false if not prime
:param number: Any whole number
:return: True if prime False if not
PSEUDOCODE
if number equal 1 return false
otherwise if number equal 2 return True
otherwise iterate from 2 to the number itself(create a new variable current_number assign it value of 2)
if the number divided by current_number remainder is 0 return False HINT: remainder operator
increment the value of x
After loop exits return True
TYPE YOUR CODE AFTER THE THREE QUOTES BELOW
DON'T FORGET RETURN STATEMENT AT THE END OF THE FUNCTION
'''
if n == 1:
return False
elif n == 2:
return True
else:
for i in range (2, n):
if (n % i)== 0:
return False
return True
def list_of_primes(n):
'''
USE A WHILE LOOP
Given a number returns all the prime numbers up to the number
Example given number 10 returns '2,3,5,7,'
:param n:
:return:
Psuedocode:
Create a new variable names primes and assign it value ''
loop from 1 to the value of n create a variable current_number and assign it value of 1
in the loop call is_prime function with an argument of current_number
if the return value of is_prime is True append current_number to the primes string HINT: Concatenate string
After loop exits return primes variable
WRITE YOUR CODE AFTER THE THREE QUOTES BELOW
'''
primes = ''
for i in range (1, n):
if is_prime (i) == True:
primes = primes +str(i)+ ','
return primes
| 31.074074 | 112 | 0.625447 |
def decimal_to_binary(number):
binary = ''
for int in range (7,-1,-1):
value = 2 ** int
if value <= number:
binary += '1'
number = number - value
else:
binary+='0'
return binary
def sum_square_of_number(number):
sum_of_squares = 0
for num in range (1, number+1):
square = num ** 2
sum_of_squares = sum_of_squares + square
return sum_of_squares
def is_prime(n):
if n == 1:
return False
elif n == 2:
return True
else:
for i in range (2, n):
if (n % i)== 0:
return False
return True
def list_of_primes(n):
primes = ''
for i in range (1, n):
if is_prime (i) == True:
primes = primes +str(i)+ ','
return primes
| true | true |
f7fddc74e3bd0da3b983ba2082a158b251918951 | 485 | py | Python | src/Classes/MSDS400/Module 8/orchids.py | bmoretz/Python-Playground | a367ec7659b85c24363c21b5c0ac25db08ffa1f6 | [
"MIT"
] | null | null | null | src/Classes/MSDS400/Module 8/orchids.py | bmoretz/Python-Playground | a367ec7659b85c24363c21b5c0ac25db08ffa1f6 | [
"MIT"
] | null | null | null | src/Classes/MSDS400/Module 8/orchids.py | bmoretz/Python-Playground | a367ec7659b85c24363c21b5c0ac25db08ffa1f6 | [
"MIT"
] | null | null | null | # 7 orchids from a collection of 20 are to be selected for a flower show. Complete parts (a) and (b) below.
import math
from functools import reduce
from operator import mul # or mul=lambda x,y:x*y
from fractions import Fraction
def nCk(n,k):
return int( reduce(mul, ( Fraction( n - i, i+ 1) for i in range( k ) ), 1) )
# In how many ways can this be done?
nCk( 20, 7 )
# How many ways can the 7 be selected if 2 special plants from the 20 must be included?
nCk( 18, 5 ) | 32.333333 | 110 | 0.686598 |
import math
from functools import reduce
from operator import mul
from fractions import Fraction
def nCk(n,k):
return int( reduce(mul, ( Fraction( n - i, i+ 1) for i in range( k ) ), 1) )
nCk( 20, 7 )
nCk( 18, 5 ) | true | true |
f7fddc833d74a103b60b8dc2feb0f9793a54c4c5 | 12,036 | py | Python | tests/test_crystal.py | dquigley-warwick/matador | 729e97efb0865c4fff50af87555730ff4b7b6d91 | [
"MIT"
] | null | null | null | tests/test_crystal.py | dquigley-warwick/matador | 729e97efb0865c4fff50af87555730ff4b7b6d91 | [
"MIT"
] | null | null | null | tests/test_crystal.py | dquigley-warwick/matador | 729e97efb0865c4fff50af87555730ff4b7b6d91 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# standard library
import unittest
import copy
from os.path import realpath
import numpy as np
# matador modules
from matador.crystal.crystal import Crystal, UnitCell
from matador.crystal.crystal_site import Site
from matador.scrapers.castep_scrapers import castep2dict, res2dict
from matador.utils.cell_utils import frac2cart
from matador.scrapers.magres_scrapers import magres2dict
# grab abs path for accessing test data
REAL_PATH = "/".join(realpath(__file__).split("/")[:-1]) + "/"
try:
import networkx # noqa
imported_networkx = True
except ImportError:
imported_networkx = False
imported_vornet = False
class UnitCellTest(unittest.TestCase):
def test_cart_init(self):
lattice_cart = [[3, 0, 0], [0, 3, 0], [0, 0, 3]]
lat_tup = tuple(tuple(vec) for vec in lattice_cart)
cell = UnitCell(lattice_cart)
self.assertEqual(cell.lattice_cart, lat_tup)
self.assertEqual(cell.lattice_abc, ((3, 3, 3), (90, 90, 90)))
self.assertEqual(cell.volume, 27)
self.assertEqual(cell.lengths, (3, 3, 3))
self.assertEqual(cell.angles, (90, 90, 90))
lattice_cart = np.asarray([[3, 0, 0], [0, 3, 0], [0, 0, 3]])
cell_2 = UnitCell(lattice_cart)
self.assertAlmostEqual(cell_2.lattice_cart, lat_tup)
self.assertAlmostEqual(cell_2.lattice_abc, ((3, 3, 3), (90, 90, 90)))
self.assertEqual(cell_2.volume, 27)
self.assertAlmostEqual(cell_2.lengths, (3, 3, 3))
self.assertAlmostEqual(cell_2.angles, (90, 90, 90))
self.assertEqual(cell.lattice_cart, lat_tup)
self.assertEqual(cell.lattice_abc, ((3, 3, 3), (90, 90, 90)))
self.assertEqual(cell.volume, 27)
self.assertEqual(cell.lengths, (3, 3, 3))
lattice_cart = [[10, 0, 0], [0, 10, 0], [0, 0, 10]]
cell.lattice_cart = lattice_cart
lat_tup = tuple(tuple(vec) for vec in lattice_cart)
self.assertEqual(cell.lattice_cart, lat_tup)
lattice_cart = "aadsfadsf"
self.assertEqual(cell.lattice_cart, lat_tup)
self.assertEqual(cell.lattice_abc, ((10, 10, 10), (90, 90, 90)))
self.assertEqual(cell.volume, 1000)
def test_abc_init(self):
lattice_abc = [[2, 3, 4], [60, 60, 60]]
lat_tup = tuple(tuple(elem) for elem in lattice_abc)
cell = UnitCell(lattice_abc)
self.assertAlmostEqual(cell.lattice_abc, lat_tup)
cell.lengths = [10, 10, 10]
self.assertEqual(cell.lattice_abc, ((10, 10, 10), (60, 60, 60)))
cell.angles = [90, 90, 90]
self.assertEqual(cell.lattice_abc, ((10, 10, 10), (90, 90, 90)))
lattice_cart = ((10, 0, 0), (0, 10, 0), (0, 0, 10))
self.assertEqual(cell.lattice_cart, lattice_cart)
class CrystalTest(unittest.TestCase):
def test_getters_setters(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
crystal = Crystal(doc)
self.assertEqual(
list(crystal.lattice_cart[0]), [9.0397727, 0.0081202, 0.0000000]
)
self.assertEqual(crystal.num_atoms, 14)
with self.assertRaises(AttributeError):
crystal["positions_frac"] = [[0, 1, 2]]
# check we can set fields to the same value
crystal["new_field"] = [1, 2, 3]
crystal["new_field"] = [1, 2, 3]
crystal["new_field_2"] = np.nan
crystal["new_field_2"] = np.nan
crystal["new_field_3"] = [1, 2, 4]
with self.assertRaises(AttributeError):
crystal["new_field_3"] = [1, 2, 5]
crystal["new_field_4"] = [1, 2, np.nan]
crystal["new_field_4"] = [1, 2, np.nan]
crystal["new_field_5"] = [1, np.nan, 2]
with self.assertRaises(AttributeError):
crystal["new_field_5"] = [1, 2, np.nan]
crystal["new_field_6"] = np.linspace(0, 1, 1000).tolist()
crystal["new_field_6"] = np.array(crystal["new_field_6"], copy=True).tolist()
def test_set_positions(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
doc = Crystal(doc)
copydoc = copy.deepcopy(doc)
old_pos = np.asarray(doc.positions_frac)
copydoc.set_positions(np.zeros_like(old_pos), fractional=True)
np.testing.assert_array_almost_equal(
np.asarray(copydoc.positions_frac), np.zeros_like(old_pos)
)
np.testing.assert_array_almost_equal(
np.asarray(copydoc.positions_abs), np.zeros_like(old_pos)
)
self.assertNotAlmostEqual(doc.positions_frac[-1][0], 0.0)
def test_convert_positions(self):
doc = res2dict(REAL_PATH + "data/structures/Li7Sn-Fmmm.res")[0]
crystal = res2dict(REAL_PATH + "data/structures/Li7Sn-Fmmm.res", as_model=True)[
0
]
doc["positions_abs"] = frac2cart(doc["lattice_cart"], doc["positions_frac"])
np.testing.assert_array_almost_equal(doc["positions_abs"], crystal.positions_abs)
for ind, site in enumerate(crystal):
np.testing.assert_array_almost_equal(doc["positions_abs"][ind], site.coords_cartesian)
crystal.cell.lengths = np.asarray(crystal.cell.lengths) * 10
rescaled_pos = frac2cart(np.asarray(doc["lattice_cart"]) * 10, doc["positions_frac"])
for ind, site in enumerate(crystal):
np.testing.assert_array_almost_equal(doc["positions_frac"][ind], site.coords)
np.testing.assert_array_almost_equal(rescaled_pos[ind], site.coords_cartesian)
def test_minimal_init(self):
doc = Crystal(
dict(
lattice_abc=np.asarray([[3, 3, 3], [90, 90, 90]]),
atom_types=["Na", "Cl"],
positions_frac=[[0, 0, 0], [0.5, 0.5, 0.5]],
)
)
self.assertEqual(doc.stoichiometry, [["Cl", 1.0], ["Na", 1.0]])
self.assertEqual(doc.lattice_abc, ((3.0, 3.0, 3.0), (90.0, 90.0, 90.0)))
self.assertEqual(
doc.lattice_cart, ((3.0, 0.0, 0.0), (0.0, 3.0, 0.0), (0.0, 0.0, 3.0))
)
self.assertEqual(len(doc.sites), 2)
self.assertEqual(doc.num_atoms, 2)
self.assertEqual(doc.concentration, [0.5, 0.5])
self.assertEqual(doc.positions_abs, [[0, 0, 0], [1.5, 1.5, 1.5]])
self.assertEqual(doc.positions_frac, [[0, 0, 0], [0.5, 0.5, 0.5]])
self.assertEqual(doc.formula, "NaCl")
self.assertEqual(doc.cell_volume, 27.0)
self.assertEqual(doc.space_group, "Pm-3m")
self.assertEqual(doc.space_group_tex, "$Pm\\bar{3}m$")
doc = Crystal(
dict(
lattice_cart=((3.0, 0.0, 0.0), (0.0, 3.0, 0.0), (0.0, 0.0, 3.0)),
atom_types=["Na", "Cl"],
positions_abs=[[0, 0, 0], [1.5, 1.5, 1.5]],
)
)
self.assertEqual(doc.lattice_abc, ((3.0, 3.0, 3.0), (90.0, 90.0, 90.0)))
self.assertEqual(
doc.lattice_cart, ((3.0, 0.0, 0.0), (0.0, 3.0, 0.0), (0.0, 0.0, 3.0))
)
self.assertEqual(doc.stoichiometry, [["Cl", 1.0], ["Na", 1.0]])
self.assertEqual(len(doc.sites), 2)
self.assertEqual(doc.num_atoms, 2)
self.assertEqual(doc.concentration, [0.5, 0.5])
self.assertEqual(doc.positions_abs, [[0.0, 0.0, 0.0], [1.5, 1.5, 1.5]])
self.assertEqual(doc.positions_frac, [[0, 0, 0], [0.5, 0.5, 0.5]])
self.assertEqual(doc.formula, "NaCl")
self.assertEqual(doc.cell_volume, 27.0)
self.assertEqual(doc.space_group, "Pm-3m")
def testSites(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
del doc["lattice_cart"]
crystal = Crystal(doc)
np.testing.assert_array_almost_equal(crystal[0].coords, [0.776467, 0.466319, 0.0])
with self.assertRaises(RuntimeError):
crystal[0].set_position([0.5, 0.6, 0.7, 0.8], "fractional")
with self.assertRaises(RuntimeError):
crystal[0].set_position([[1, 2, 3], [4, 5, 6], [7, 8, 9]], "fractional")
self.assertEqual(
[atom for atom in crystal], [atom[1] for atom in enumerate(crystal)]
)
atom = Site(
species="Cl",
position=[0.2, 0.5, 0.2],
lattice=[[10, 0, 0], [0, 10, 0], [0, 0, 10]],
)
atom2 = copy.deepcopy(atom)
atom2.species = "Br"
self.assertEqual(atom.species, "Cl")
self.assertEqual(atom2.species, "Br")
atom2.set_position([1.2, -0.5, 0.2], "fractional")
np.testing.assert_array_almost_equal(
atom2.displacement_between_sites(atom), [0.0, 0.0, 0.0], decimal=10
)
self.assertAlmostEqual(atom2.distance_between_sites(atom), 0.0, places=10)
atom2.set_position([1.3, -0.5, 0.2], "fractional")
np.testing.assert_array_almost_equal(
atom2.displacement_between_sites(atom), [1.0, 0.0, 0.0], decimal=10
)
self.assertAlmostEqual(atom2.distance_between_sites(atom), 1.0, places=10)
atom2.set_position([1.3, -0.5, 0.3], "fractional")
np.testing.assert_array_almost_equal(
atom2.displacement_between_sites(atom), [1.0, 0.0, 1.0], decimal=10
)
self.assertAlmostEqual(
atom2.distance_between_sites(atom), np.sqrt(2), places=10
)
def testSpg(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
crystal = Crystal(doc)
print(crystal.get_space_group(symprec=0.01))
print(crystal.get_space_group(symprec=0.001))
self.assertEqual(crystal.get_space_group(symprec=0.0000001), "Pm")
def testFromMagres(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
for atom in crystal:
print(atom, atom["chemical_shielding_iso"], atom["chemical_shift_asymmetry"])
@unittest.skipIf(not imported_vornet, "Voronoi code not found in this distribution")
def testCoordination(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc, voronoi=True)
for atom in crystal:
print(atom, atom.coordination)
print(crystal.coordination_lists)
print(crystal.coordination_stats)
@unittest.skipIf(not imported_vornet, "Voronoi code not found in this distribution")
def testVoronoi(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
print(crystal.unique_sites)
@unittest.skipIf(not imported_networkx, "NetworkX missing")
def testBondLengths(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
print(crystal.bond_lengths)
@unittest.skipIf(not imported_networkx, "NetworkX missing")
def testBondStats(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
print(crystal.bonding_stats)
class ElasticCrystalTest(unittest.TestCase):
""" Test the elastic functionality of the Crystal module. """
def testKBulkModulus(self):
from matador.crystal.elastic import get_equation_of_state
results = get_equation_of_state(
REAL_PATH + "/data/bulk_modulus/K-bulk_modulus", plot=False
)
self.assertTrue("eos" in results)
self.assertEqual(len(results["eos"]), 3)
self.assertAlmostEqual(results["eos"][0].bulk_modulus, 3.696117355)
self.assertAlmostEqual(results["eos"][1].bulk_modulus, 3.699072676)
self.assertAlmostEqual(results["eos"][2].bulk_modulus, 3.691406442)
self.assertAlmostEqual(results["eos"][0].bulk_modulus_err, 3e-6, places=1)
self.assertAlmostEqual(results["eos"][1].bulk_modulus_err, 2e-6, places=1)
self.assertAlmostEqual(results["eos"][2].bulk_modulus_err, 2e-6, places=1)
if __name__ == "__main__":
unittest.main(buffer=False, verbosity=2)
| 41.219178 | 98 | 0.622383 |
import unittest
import copy
from os.path import realpath
import numpy as np
from matador.crystal.crystal import Crystal, UnitCell
from matador.crystal.crystal_site import Site
from matador.scrapers.castep_scrapers import castep2dict, res2dict
from matador.utils.cell_utils import frac2cart
from matador.scrapers.magres_scrapers import magres2dict
REAL_PATH = "/".join(realpath(__file__).split("/")[:-1]) + "/"
try:
import networkx
imported_networkx = True
except ImportError:
imported_networkx = False
imported_vornet = False
class UnitCellTest(unittest.TestCase):
def test_cart_init(self):
lattice_cart = [[3, 0, 0], [0, 3, 0], [0, 0, 3]]
lat_tup = tuple(tuple(vec) for vec in lattice_cart)
cell = UnitCell(lattice_cart)
self.assertEqual(cell.lattice_cart, lat_tup)
self.assertEqual(cell.lattice_abc, ((3, 3, 3), (90, 90, 90)))
self.assertEqual(cell.volume, 27)
self.assertEqual(cell.lengths, (3, 3, 3))
self.assertEqual(cell.angles, (90, 90, 90))
lattice_cart = np.asarray([[3, 0, 0], [0, 3, 0], [0, 0, 3]])
cell_2 = UnitCell(lattice_cart)
self.assertAlmostEqual(cell_2.lattice_cart, lat_tup)
self.assertAlmostEqual(cell_2.lattice_abc, ((3, 3, 3), (90, 90, 90)))
self.assertEqual(cell_2.volume, 27)
self.assertAlmostEqual(cell_2.lengths, (3, 3, 3))
self.assertAlmostEqual(cell_2.angles, (90, 90, 90))
self.assertEqual(cell.lattice_cart, lat_tup)
self.assertEqual(cell.lattice_abc, ((3, 3, 3), (90, 90, 90)))
self.assertEqual(cell.volume, 27)
self.assertEqual(cell.lengths, (3, 3, 3))
lattice_cart = [[10, 0, 0], [0, 10, 0], [0, 0, 10]]
cell.lattice_cart = lattice_cart
lat_tup = tuple(tuple(vec) for vec in lattice_cart)
self.assertEqual(cell.lattice_cart, lat_tup)
lattice_cart = "aadsfadsf"
self.assertEqual(cell.lattice_cart, lat_tup)
self.assertEqual(cell.lattice_abc, ((10, 10, 10), (90, 90, 90)))
self.assertEqual(cell.volume, 1000)
def test_abc_init(self):
lattice_abc = [[2, 3, 4], [60, 60, 60]]
lat_tup = tuple(tuple(elem) for elem in lattice_abc)
cell = UnitCell(lattice_abc)
self.assertAlmostEqual(cell.lattice_abc, lat_tup)
cell.lengths = [10, 10, 10]
self.assertEqual(cell.lattice_abc, ((10, 10, 10), (60, 60, 60)))
cell.angles = [90, 90, 90]
self.assertEqual(cell.lattice_abc, ((10, 10, 10), (90, 90, 90)))
lattice_cart = ((10, 0, 0), (0, 10, 0), (0, 0, 10))
self.assertEqual(cell.lattice_cart, lattice_cart)
class CrystalTest(unittest.TestCase):
def test_getters_setters(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
crystal = Crystal(doc)
self.assertEqual(
list(crystal.lattice_cart[0]), [9.0397727, 0.0081202, 0.0000000]
)
self.assertEqual(crystal.num_atoms, 14)
with self.assertRaises(AttributeError):
crystal["positions_frac"] = [[0, 1, 2]]
crystal["new_field"] = [1, 2, 3]
crystal["new_field"] = [1, 2, 3]
crystal["new_field_2"] = np.nan
crystal["new_field_2"] = np.nan
crystal["new_field_3"] = [1, 2, 4]
with self.assertRaises(AttributeError):
crystal["new_field_3"] = [1, 2, 5]
crystal["new_field_4"] = [1, 2, np.nan]
crystal["new_field_4"] = [1, 2, np.nan]
crystal["new_field_5"] = [1, np.nan, 2]
with self.assertRaises(AttributeError):
crystal["new_field_5"] = [1, 2, np.nan]
crystal["new_field_6"] = np.linspace(0, 1, 1000).tolist()
crystal["new_field_6"] = np.array(crystal["new_field_6"], copy=True).tolist()
def test_set_positions(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
doc = Crystal(doc)
copydoc = copy.deepcopy(doc)
old_pos = np.asarray(doc.positions_frac)
copydoc.set_positions(np.zeros_like(old_pos), fractional=True)
np.testing.assert_array_almost_equal(
np.asarray(copydoc.positions_frac), np.zeros_like(old_pos)
)
np.testing.assert_array_almost_equal(
np.asarray(copydoc.positions_abs), np.zeros_like(old_pos)
)
self.assertNotAlmostEqual(doc.positions_frac[-1][0], 0.0)
def test_convert_positions(self):
doc = res2dict(REAL_PATH + "data/structures/Li7Sn-Fmmm.res")[0]
crystal = res2dict(REAL_PATH + "data/structures/Li7Sn-Fmmm.res", as_model=True)[
0
]
doc["positions_abs"] = frac2cart(doc["lattice_cart"], doc["positions_frac"])
np.testing.assert_array_almost_equal(doc["positions_abs"], crystal.positions_abs)
for ind, site in enumerate(crystal):
np.testing.assert_array_almost_equal(doc["positions_abs"][ind], site.coords_cartesian)
crystal.cell.lengths = np.asarray(crystal.cell.lengths) * 10
rescaled_pos = frac2cart(np.asarray(doc["lattice_cart"]) * 10, doc["positions_frac"])
for ind, site in enumerate(crystal):
np.testing.assert_array_almost_equal(doc["positions_frac"][ind], site.coords)
np.testing.assert_array_almost_equal(rescaled_pos[ind], site.coords_cartesian)
def test_minimal_init(self):
doc = Crystal(
dict(
lattice_abc=np.asarray([[3, 3, 3], [90, 90, 90]]),
atom_types=["Na", "Cl"],
positions_frac=[[0, 0, 0], [0.5, 0.5, 0.5]],
)
)
self.assertEqual(doc.stoichiometry, [["Cl", 1.0], ["Na", 1.0]])
self.assertEqual(doc.lattice_abc, ((3.0, 3.0, 3.0), (90.0, 90.0, 90.0)))
self.assertEqual(
doc.lattice_cart, ((3.0, 0.0, 0.0), (0.0, 3.0, 0.0), (0.0, 0.0, 3.0))
)
self.assertEqual(len(doc.sites), 2)
self.assertEqual(doc.num_atoms, 2)
self.assertEqual(doc.concentration, [0.5, 0.5])
self.assertEqual(doc.positions_abs, [[0, 0, 0], [1.5, 1.5, 1.5]])
self.assertEqual(doc.positions_frac, [[0, 0, 0], [0.5, 0.5, 0.5]])
self.assertEqual(doc.formula, "NaCl")
self.assertEqual(doc.cell_volume, 27.0)
self.assertEqual(doc.space_group, "Pm-3m")
self.assertEqual(doc.space_group_tex, "$Pm\\bar{3}m$")
doc = Crystal(
dict(
lattice_cart=((3.0, 0.0, 0.0), (0.0, 3.0, 0.0), (0.0, 0.0, 3.0)),
atom_types=["Na", "Cl"],
positions_abs=[[0, 0, 0], [1.5, 1.5, 1.5]],
)
)
self.assertEqual(doc.lattice_abc, ((3.0, 3.0, 3.0), (90.0, 90.0, 90.0)))
self.assertEqual(
doc.lattice_cart, ((3.0, 0.0, 0.0), (0.0, 3.0, 0.0), (0.0, 0.0, 3.0))
)
self.assertEqual(doc.stoichiometry, [["Cl", 1.0], ["Na", 1.0]])
self.assertEqual(len(doc.sites), 2)
self.assertEqual(doc.num_atoms, 2)
self.assertEqual(doc.concentration, [0.5, 0.5])
self.assertEqual(doc.positions_abs, [[0.0, 0.0, 0.0], [1.5, 1.5, 1.5]])
self.assertEqual(doc.positions_frac, [[0, 0, 0], [0.5, 0.5, 0.5]])
self.assertEqual(doc.formula, "NaCl")
self.assertEqual(doc.cell_volume, 27.0)
self.assertEqual(doc.space_group, "Pm-3m")
def testSites(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
del doc["lattice_cart"]
crystal = Crystal(doc)
np.testing.assert_array_almost_equal(crystal[0].coords, [0.776467, 0.466319, 0.0])
with self.assertRaises(RuntimeError):
crystal[0].set_position([0.5, 0.6, 0.7, 0.8], "fractional")
with self.assertRaises(RuntimeError):
crystal[0].set_position([[1, 2, 3], [4, 5, 6], [7, 8, 9]], "fractional")
self.assertEqual(
[atom for atom in crystal], [atom[1] for atom in enumerate(crystal)]
)
atom = Site(
species="Cl",
position=[0.2, 0.5, 0.2],
lattice=[[10, 0, 0], [0, 10, 0], [0, 0, 10]],
)
atom2 = copy.deepcopy(atom)
atom2.species = "Br"
self.assertEqual(atom.species, "Cl")
self.assertEqual(atom2.species, "Br")
atom2.set_position([1.2, -0.5, 0.2], "fractional")
np.testing.assert_array_almost_equal(
atom2.displacement_between_sites(atom), [0.0, 0.0, 0.0], decimal=10
)
self.assertAlmostEqual(atom2.distance_between_sites(atom), 0.0, places=10)
atom2.set_position([1.3, -0.5, 0.2], "fractional")
np.testing.assert_array_almost_equal(
atom2.displacement_between_sites(atom), [1.0, 0.0, 0.0], decimal=10
)
self.assertAlmostEqual(atom2.distance_between_sites(atom), 1.0, places=10)
atom2.set_position([1.3, -0.5, 0.3], "fractional")
np.testing.assert_array_almost_equal(
atom2.displacement_between_sites(atom), [1.0, 0.0, 1.0], decimal=10
)
self.assertAlmostEqual(
atom2.distance_between_sites(atom), np.sqrt(2), places=10
)
def testSpg(self):
doc, s = castep2dict(REAL_PATH + "data/Na3Zn4-swap-ReOs-OQMD_759599.castep")
crystal = Crystal(doc)
print(crystal.get_space_group(symprec=0.01))
print(crystal.get_space_group(symprec=0.001))
self.assertEqual(crystal.get_space_group(symprec=0.0000001), "Pm")
def testFromMagres(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
for atom in crystal:
print(atom, atom["chemical_shielding_iso"], atom["chemical_shift_asymmetry"])
@unittest.skipIf(not imported_vornet, "Voronoi code not found in this distribution")
def testCoordination(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc, voronoi=True)
for atom in crystal:
print(atom, atom.coordination)
print(crystal.coordination_lists)
print(crystal.coordination_stats)
@unittest.skipIf(not imported_vornet, "Voronoi code not found in this distribution")
def testVoronoi(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
print(crystal.unique_sites)
@unittest.skipIf(not imported_networkx, "NetworkX missing")
def testBondLengths(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
print(crystal.bond_lengths)
@unittest.skipIf(not imported_networkx, "NetworkX missing")
def testBondStats(self):
doc, s = magres2dict(REAL_PATH + "data/magres_files/NaP_QE6.magres")
crystal = Crystal(doc)
print(crystal.bonding_stats)
class ElasticCrystalTest(unittest.TestCase):
def testKBulkModulus(self):
from matador.crystal.elastic import get_equation_of_state
results = get_equation_of_state(
REAL_PATH + "/data/bulk_modulus/K-bulk_modulus", plot=False
)
self.assertTrue("eos" in results)
self.assertEqual(len(results["eos"]), 3)
self.assertAlmostEqual(results["eos"][0].bulk_modulus, 3.696117355)
self.assertAlmostEqual(results["eos"][1].bulk_modulus, 3.699072676)
self.assertAlmostEqual(results["eos"][2].bulk_modulus, 3.691406442)
self.assertAlmostEqual(results["eos"][0].bulk_modulus_err, 3e-6, places=1)
self.assertAlmostEqual(results["eos"][1].bulk_modulus_err, 2e-6, places=1)
self.assertAlmostEqual(results["eos"][2].bulk_modulus_err, 2e-6, places=1)
if __name__ == "__main__":
unittest.main(buffer=False, verbosity=2)
| true | true |
f7fddd06e2471b72240c6432c772b4245f286880 | 4,658 | py | Python | test.py | chrisqqq123/FA-Dist-EfficientNet | cb788b0f212d568d9bf04a51516d79fed5383585 | [
"MIT"
] | 1 | 2022-03-09T02:24:22.000Z | 2022-03-09T02:24:22.000Z | test.py | chrisqqq123/FA-Dist-EfficientNet | cb788b0f212d568d9bf04a51516d79fed5383585 | [
"MIT"
] | null | null | null | test.py | chrisqqq123/FA-Dist-EfficientNet | cb788b0f212d568d9bf04a51516d79fed5383585 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Script to test a pytorch model on Cifar100's validation set."""
import argparse
import logging
import pprint
import sys
import time
import torch
from torch import nn
from models import model_factory
import opts
import utils
import mul_cifar100
def parse_args(argv):
"""Parse arguments @argv and return the flags needed for training."""
parser = argparse.ArgumentParser(description=__doc__, allow_abbrev=False)
group = parser.add_argument_group('General Options')
opts.add_general_flags(group)
group = parser.add_argument_group('Dataset Options')
opts.add_dataset_flags(group)
group = parser.add_argument_group('Model Options')
opts.add_model_flags(group)
args = parser.parse_args(argv)
if args.model_state_file is None:
parser.error("You should set --model-state-file to reload a model "
"state.")
return args
def test_for_one_epoch(model, loss, test_loader, epoch_number):
    """Evaluate *model* on *test_loader* for one epoch and log metrics.

    Runs in eval mode with gradients disabled, tracking data/batch
    timing, average loss, and top-1/top-5 accuracy. Logs progress every
    10 batches and a summary line at the end. *epoch_number* is used
    only for log labels.
    """
    # Switch model and loss (which may hold trainable state) to eval mode.
    model.eval()
    loss.eval()
    data_time_meter = utils.AverageMeter()
    batch_time_meter = utils.AverageMeter()
    # recent=100 additionally tracks a moving average over the last 100 updates.
    loss_meter = utils.AverageMeter(recent=100)
    top1_meter = utils.AverageMeter(recent=100)
    top5_meter = utils.AverageMeter(recent=100)
    timestamp = time.time()
    for i, (images, labels) in enumerate(test_loader):
        batch_size = images.size(0)
        # Move the batch to the GPU when the model lives there.
        if utils.is_model_cuda(model):
            images = images.cuda()
            labels = labels.cuda()
        # Record data-loading time for this batch.
        data_time_meter.update(time.time() - timestamp)
        # Forward pass without computing gradients.
        with torch.no_grad():
            outputs = model(images)
            loss_output = loss(outputs, labels)
        # Sometimes loss function returns a modified version of the output,
        # which must be used to compute the model accuracy.
        if isinstance(loss_output, tuple):
            loss_value, outputs = loss_output
        else:
            loss_value = loss_output
        # Record loss and model accuracy (weighted by batch size).
        loss_meter.update(loss_value.item(), batch_size)
        top1, top5 = utils.topk_accuracy(outputs, labels, recalls=(1, 5))
        top1_meter.update(top1, batch_size)
        top5_meter.update(top5, batch_size)
        # Record total batch time and restart the clock for the next batch.
        batch_time_meter.update(time.time() - timestamp)
        timestamp = time.time()
        if i % 10 == 0:
            logging.info(
                'Epoch: [{epoch}][{batch}/{epoch_size}]\t'
                'Time {batch_time.value:.2f} ({batch_time.average:.2f}) '
                'Data {data_time.value:.2f} ({data_time.average:.2f}) '
                'Loss {loss.value:.3f} {{{loss.average:.3f}, {loss.average_recent:.3f}}} '
                'Top-1 {top1.value:.2f} {{{top1.average:.2f}, {top1.average_recent:.2f}}} '
                'Top-5 {top5.value:.2f} {{{top5.average:.2f}, {top5.average_recent:.2f}}} '.format(
                    epoch=epoch_number, batch=i + 1, epoch_size=len(test_loader),
                    batch_time=batch_time_meter, data_time=data_time_meter,
                    loss=loss_meter, top1=top1_meter, top5=top5_meter))
    # Log the overall test stats
    logging.info(
        'Epoch: [{epoch}] -- TESTING SUMMARY\t'
        'Time {batch_time.sum:.2f} '
        'Data {data_time.sum:.2f} '
        'Loss {loss.average:.3f} '
        'Top-1 {top1.average:.2f} '
        'Top-5 {top5.average:.2f} '.format(
            epoch=epoch_number, batch_time=batch_time_meter, data_time=data_time_meter,
            loss=loss_meter, top1=top1_meter, top5=top5_meter))
def main(argv):
    """Run the test script with command line arguments @argv.

    Parses flags, sets up logging/GPUs, builds the CIFAR-100 validation
    loader, restores the model from --model-state-file, and evaluates
    it for a single epoch.
    """
    args = parse_args(argv)
    utils.general_setup(args.save, args.gpus)
    logging.info("Arguments parsed.\n{}".format(pprint.pformat(vars(args))))
    # Create the CIFAR-100 validation data loader (train=False).
    val_loader = mul_cifar100.mul_CIFAR100DataLoader(root=args.data_dir,
        image_size=32, train=False, batch_size=args.batch_size, shuffle=True, num_workers=args.num_workers)
    # Create the model and the loss, reloading state from --model-state-file.
    model, loss = model_factory.create_model(
        args.model, args.model_state_file, args.gpus, coslinear=args.coslinear, scale=args.s)
    logging.info("Model:\n{}".format(model))
    # Test for one epoch.
    test_for_one_epoch(model, loss, val_loader, epoch_number=1)
    print('\n')
if __name__ == '__main__':
main(sys.argv[1:])
| 35.287879 | 107 | 0.640833 |
import argparse
import logging
import pprint
import sys
import time
import torch
from torch import nn
from models import model_factory
import opts
import utils
import mul_cifar100
def parse_args(argv):
parser = argparse.ArgumentParser(description=__doc__, allow_abbrev=False)
group = parser.add_argument_group('General Options')
opts.add_general_flags(group)
group = parser.add_argument_group('Dataset Options')
opts.add_dataset_flags(group)
group = parser.add_argument_group('Model Options')
opts.add_model_flags(group)
args = parser.parse_args(argv)
if args.model_state_file is None:
parser.error("You should set --model-state-file to reload a model "
"state.")
return args
def test_for_one_epoch(model, loss, test_loader, epoch_number):
model.eval()
loss.eval()
data_time_meter = utils.AverageMeter()
batch_time_meter = utils.AverageMeter()
loss_meter = utils.AverageMeter(recent=100)
top1_meter = utils.AverageMeter(recent=100)
top5_meter = utils.AverageMeter(recent=100)
timestamp = time.time()
for i, (images, labels) in enumerate(test_loader):
batch_size = images.size(0)
if utils.is_model_cuda(model):
images = images.cuda()
labels = labels.cuda()
data_time_meter.update(time.time() - timestamp)
with torch.no_grad():
outputs = model(images)
loss_output = loss(outputs, labels)
if isinstance(loss_output, tuple):
loss_value, outputs = loss_output
else:
loss_value = loss_output
loss_meter.update(loss_value.item(), batch_size)
top1, top5 = utils.topk_accuracy(outputs, labels, recalls=(1, 5))
top1_meter.update(top1, batch_size)
top5_meter.update(top5, batch_size)
batch_time_meter.update(time.time() - timestamp)
timestamp = time.time()
if i % 10 == 0:
logging.info(
'Epoch: [{epoch}][{batch}/{epoch_size}]\t'
'Time {batch_time.value:.2f} ({batch_time.average:.2f}) '
'Data {data_time.value:.2f} ({data_time.average:.2f}) '
'Loss {loss.value:.3f} {{{loss.average:.3f}, {loss.average_recent:.3f}}} '
'Top-1 {top1.value:.2f} {{{top1.average:.2f}, {top1.average_recent:.2f}}} '
'Top-5 {top5.value:.2f} {{{top5.average:.2f}, {top5.average_recent:.2f}}} '.format(
epoch=epoch_number, batch=i + 1, epoch_size=len(test_loader),
batch_time=batch_time_meter, data_time=data_time_meter,
loss=loss_meter, top1=top1_meter, top5=top5_meter))
logging.info(
'Epoch: [{epoch}] -- TESTING SUMMARY\t'
'Time {batch_time.sum:.2f} '
'Data {data_time.sum:.2f} '
'Loss {loss.average:.3f} '
'Top-1 {top1.average:.2f} '
'Top-5 {top5.average:.2f} '.format(
epoch=epoch_number, batch_time=batch_time_meter, data_time=data_time_meter,
loss=loss_meter, top1=top1_meter, top5=top5_meter))
def main(argv):
args = parse_args(argv)
utils.general_setup(args.save, args.gpus)
logging.info("Arguments parsed.\n{}".format(pprint.pformat(vars(args))))
val_loader = mul_cifar100.mul_CIFAR100DataLoader(root=args.data_dir,
image_size=32, train=False, batch_size=args.batch_size, shuffle=True, num_workers=args.num_workers)
model, loss = model_factory.create_model(
args.model, args.model_state_file, args.gpus, coslinear=args.coslinear, scale=args.s)
logging.info("Model:\n{}".format(model))
test_for_one_epoch(model, loss, val_loader, epoch_number=1)
print('\n')
if __name__ == '__main__':
main(sys.argv[1:])
| true | true |
f7fddd243e7d093627acd79beeb9c70e14af278a | 1,572 | py | Python | VideoUtils/codec.py | epmcj/nextflix | de15f0a63fe8906a0417da675b9a1c408f71bc79 | [
"MIT"
] | null | null | null | VideoUtils/codec.py | epmcj/nextflix | de15f0a63fe8906a0417da675b9a1c408f71bc79 | [
"MIT"
] | null | null | null | VideoUtils/codec.py | epmcj/nextflix | de15f0a63fe8906a0417da675b9a1c408f71bc79 | [
"MIT"
] | null | null | null | import numpy as np
import structures as st
def decomposeFrame(frame, frameNum):
    """Decompose *frame* into a transferable st.Data object.

    A 3-D array (H x W x C) is split per channel; a 2-D array is
    treated as a single channel. Each channel goes through
    decomposeMatrix, and the results are wrapped together with the
    frame number.
    """
    if len(frame.shape) == 3:
        channels = [decomposeMatrix(frame[:, :, c]) for c in range(frame.shape[2])]
    else:
        channels = [decomposeMatrix(frame)]
    return st.Data(channels, frameNum)
def decomposeMatrix(mat):
    """Apply thin SVD decomposition to a single image channel.

    Returns an st.Channel whose elements pair each left-singular
    column, singular value, and right-singular row (ordered by
    decreasing singular value, as numpy.linalg.svd guarantees).
    """
    # np.matrix is deprecated in NumPy; a plain 2-D float ndarray gives
    # the same factorization while keeping the P/Q slices as 1-D
    # ndarrays instead of matrix objects.
    P, D, Q = np.linalg.svd(np.asarray(mat, dtype=float), full_matrices=False)
    ceList = [st.ChannelElement(P[:, i], D[i], Q[i, :]) for i in range(len(D))]
    return st.Channel(ceList)
def composeFrame(data):
    """Recompose an image frame from transferable SVD data.

    Returns a (success, frame) pair: (False, None) when *data* holds no
    channel information, otherwise (True, frame) where frame is a uint8
    array of shape (height, width, num_channels).
    """
    if data.isEmpty():
        return False, None
    height, width = data.dim()
    num_channels = len(data.channel)
    frame = np.zeros((height, width, num_channels), np.uint8)
    for c in range(num_channels):
        # Recompose each channel and truncate to 8-bit pixel values.
        frame[:, :, c] = np.uint8(composeMatrix(data.channel[c]))
    return True, frame
def composeMatrix(channel):
    """Recompose a single float image channel from its SVD elements.

    Rebuilds the factor matrices P (left-singular columns), D (singular
    values), and Q (right-singular rows) from the channel's elements
    and returns their product P @ diag(D) @ Q.
    """
    height, width = channel.dim()
    rank = len(channel)
    P = np.zeros((height, rank))
    D = np.zeros(rank)
    Q = np.zeros((rank, width))
    # Fill the factor matrices from the stored channel elements.
    for i in range(rank):
        element = channel.list[i]
        P[:, i] = element.P_column.flatten()
        D[i] = element.D_value
        Q[i, :] = element.Q_line.flatten()
    # Way back from the SVD: multiply the factors together.
    return P @ np.diag(D) @ Q
| 26.644068 | 73 | 0.701654 | import numpy as np
import structures as st
def decomposeFrame(frame,frameNum):
channelList = []
if len(frame.shape)==3:
for i in range(frame.shape[2]):
channelList.append(decomposeMatrix(frame[:,:,i]))
else:
channelList.append(decomposeMatrix(frame))
return(st.Data(channelList, frameNum))
def decomposeMatrix(mat):
P, D, Q = np.linalg.svd(np.matrix(mat,dtype=float), full_matrices=False)
ceList = []
for i in range(len(D)):
ceList.append(st.ChannelElement(P[:,i],D[i],Q[i,:]))
return(st.Channel(ceList))
def composeFrame(data):
if(data.isEmpty()):
return False,None
else:
height, width = data.dim()
frame = np.zeros((height,width,len(data.channel)), np.uint8)
for i in range(len(data.channel)):
frame[:,:,i] = np.uint8(composeMatrix(data.channel[i]));
return True,frame
def composeMatrix(channel):
height, width = channel.dim()
P = np.zeros((height,len(channel)));
D = np.zeros(len(channel));
Q = np.zeros((len(channel),width));
for i in range(len(channel)):
P[:,i] = channel.list[i].P_column.flatten()
D[i] = channel.list[i].D_value
Q[i,:] = channel.list[i].Q_line.flatten()
m = np.matmul(np.matmul(P, np.diag(D)), Q)
return(m)
| true | true |
f7fddd71a9030bafa5e3d9c34da976d9c9f787b7 | 5,649 | py | Python | python/lbann/contrib/modules/radial_profile.py | LLNL/LBANN | 8bcc5d461e52de70e329d73081ca7eee3e5c580a | [
"Apache-2.0"
] | null | null | null | python/lbann/contrib/modules/radial_profile.py | LLNL/LBANN | 8bcc5d461e52de70e329d73081ca7eee3e5c580a | [
"Apache-2.0"
] | null | null | null | python/lbann/contrib/modules/radial_profile.py | LLNL/LBANN | 8bcc5d461e52de70e329d73081ca7eee3e5c580a | [
"Apache-2.0"
] | null | null | null | import numpy as np
import lbann
import lbann.modules
class RadialProfile(lbann.modules.Module):
    """Compute average pixel value w.r.t. distance from image center.

    We compute the distance between each image pixel and the image
    center. These distances are binned (with a bin size of 1), and the
    average pixel value in each bin is computed.

    A separate profile is computed for each image channel. The image
    can have any spatial dimension, but the first dimension is
    interpreted as the channel dimension (e.g. CHW format).
    """
    def __init__(self):
        # Stateless module: all configuration is passed to forward().
        pass
    def forward(self, image, dims, max_r):
        """Compute radial profile.

        Args:
            image (lbann.Layer): Image
            dims (tuple of int): Image dimensions (dim 0 corresponds
                to channel)
            max_r (int): Maximum radial distance. Pixels outside this
                distance are ignored.

        Returns:
            Layer: num_channels x max_r radial profile
        """
        # Bin spatial positions by (integer) distance from the center.
        r, r_counts = self._find_radial_bins(dims[1:], max_r)
        # Reciprocal of bin counts
        # Note: If a count is 0, its reciprocal is 0, so an empty bin
        # yields a zero mean instead of a division by zero.
        r_counts_recip = [0 if c==0 else 1/c for c in r_counts]
        # Get scatter indices and scaling factors
        # Note: Independent binning for each channel (dim 0) — each
        # channel's bin indices are offset by channel_index * max_r so
        # a single flat Scatter covers all channels at once.
        tile_dims = [dims[0]] + [1]*r.ndim
        inds_vals = np.tile(r, tile_dims)
        inds_vals += np.arange(0, dims[0]*max_r, max_r).reshape(tile_dims)
        # Positions beyond max_r are mapped to index -1 (out of range;
        # presumably dropped by Scatter — confirm against LBANN docs).
        inds_vals[:,r>=max_r] = -1
        inds_vals = inds_vals.flatten()
        scales_vals = r_counts_recip * dims[0]
        # Construct LBANN layer graph
        image = lbann.Reshape(image, dims=[np.prod(dims)])
        inds = lbann.WeightsLayer(
            weights=lbann.Weights(
                lbann.ValueInitializer(values=inds_vals),
                optimizer=lbann.NoOptimizer(),
            ),
            dims=[len(inds_vals)],
        )
        r_sums = lbann.Scatter(image, inds, dims=[dims[0]*max_r])
        scales = lbann.WeightsLayer(
            weights=lbann.Weights(
                lbann.ValueInitializer(values=scales_vals),
                optimizer=lbann.NoOptimizer(),
            ),
            dims=[len(scales_vals)],
        )
        # Per-bin sums times reciprocal counts gives per-bin means.
        r_means = lbann.Multiply(scales, r_sums)
        return lbann.Reshape(r_means, dims=[dims[0], max_r])
    def _find_radial_bins(self, dims, max_r):
        """Bin tensor positions based on distance from center.

        Args:
            dims (tuple of int): Tensor dimensions
            max_r (int): Maximum radial distance. Positions outside
                this distance are ignored.

        Returns:
            numpy.ndarray of int: Bin for each tensor position. Some
                bins may be greater than max_r. Its dimensions match
                dims.
            numpy.ndarray of int: Number of positions in each bin.
                It is 1D and with a length of max_r.
        """
        # Find bin for each position: accumulate the squared distance
        # from the center one dimension at a time via broadcasting.
        r2 = np.zeros([])
        for i, d in enumerate(dims):
            x = np.arange(d) - (d-1)/2
            r2 = np.expand_dims(r2, -1) + x**2
        r = np.sqrt(r2).astype(int)
        # Count number of positions in each bin
        # Note: Pad/truncate to max_r
        r_counts = np.bincount(r.flatten(), minlength=max_r)
        r_counts = r_counts[:max_r]
        return r, r_counts
# Test by computing radial profile for user-provided image
if __name__ == "__main__":
    # Imports
    import argparse
    import matplotlib.image
    # Command-line options
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'image', action='store', type=str,
        help='image file', metavar='FILE',
    )
    args = parser.parse_args()
    # Load image and normalize it to 3-channel RGB.
    image = matplotlib.image.imread(args.image)
    if image.ndim == 2:
        image = np.expand_dims(image, 2)
    assert image.ndim == 3, f'failed to load 2D image from {args.image}'
    if image.shape[-1] == 1:
        # Grayscale: replicate the single channel to RGB.
        image = np.tile(image, (1,1,3))
    elif image.shape[-1] == 4:
        # RGBA: drop the alpha channel.
        image = image[:,:,:3]
    assert image.shape[-1] == 3, f'failed to load RGB image from {args.image}'
    # HWC -> CHW, the layout RadialProfile expects.
    image = np.transpose(image, (2,0,1))
    # Dummy input
    reader = lbann.reader_pb2.DataReader()
    def add_data_reader(role):
        # Register a 1-sample synthetic reader for the given role.
        _reader = reader.reader.add()
        _reader.name = 'synthetic'
        _reader.role = role
        _reader.num_samples = 1
        _reader.num_labels = 1
        _reader.synth_dimensions = '1'
        _reader.percent_of_data_to_use = 1.0
    add_data_reader('train')
    add_data_reader('test')
    input_ = lbann.Input()
    # Radial profile: the image enters the graph as constant weights.
    x = lbann.WeightsLayer(
        weights=lbann.Weights(
            lbann.ValueInitializer(values=image.flatten()),
        ),
        dims=image.shape,
    )
    # Max radius is half the image width (shape is CHW after transpose).
    max_r = image.shape[-1] // 2
    rprof = RadialProfile()(x, image.shape, max_r)
    # Slice the profile into one named output per color channel.
    rprof_slice = lbann.Slice(rprof, slice_points=[0,1,2,3])
    red = lbann.Identity(rprof_slice, name='red')
    green = lbann.Identity(rprof_slice, name='green')
    blue = lbann.Identity(rprof_slice, name='blue')
    # Construct model that just dumps the three channel profiles.
    callbacks = [
        lbann.CallbackDumpOutputs(layers=['red', 'green', 'blue']),
    ]
    model = lbann.Model(
        epochs=0,
        layers=lbann.traverse_layer_graph([input_, rprof]),
        callbacks=callbacks,
    )
    # Run LBANN
    lbann.run(
        trainer=lbann.Trainer(mini_batch_size=1),
        model=model,
        data_reader=reader,
        optimizer=lbann.NoOptimizer(),
        job_name='lbann_radial_profile_test',
    )
| 32.096591 | 78 | 0.601699 | import numpy as np
import lbann
import lbann.modules
class RadialProfile(lbann.modules.Module):
def __init__(self):
pass
def forward(self, image, dims, max_r):
r, r_counts = self._find_radial_bins(dims[1:], max_r)
r_counts_recip = [0 if c==0 else 1/c for c in r_counts]
tile_dims = [dims[0]] + [1]*r.ndim
inds_vals = np.tile(r, tile_dims)
inds_vals += np.arange(0, dims[0]*max_r, max_r).reshape(tile_dims)
inds_vals[:,r>=max_r] = -1
inds_vals = inds_vals.flatten()
scales_vals = r_counts_recip * dims[0]
image = lbann.Reshape(image, dims=[np.prod(dims)])
inds = lbann.WeightsLayer(
weights=lbann.Weights(
lbann.ValueInitializer(values=inds_vals),
optimizer=lbann.NoOptimizer(),
),
dims=[len(inds_vals)],
)
r_sums = lbann.Scatter(image, inds, dims=[dims[0]*max_r])
scales = lbann.WeightsLayer(
weights=lbann.Weights(
lbann.ValueInitializer(values=scales_vals),
optimizer=lbann.NoOptimizer(),
),
dims=[len(scales_vals)],
)
r_means = lbann.Multiply(scales, r_sums)
return lbann.Reshape(r_means, dims=[dims[0], max_r])
def _find_radial_bins(self, dims, max_r):
r2 = np.zeros([])
for i, d in enumerate(dims):
x = np.arange(d) - (d-1)/2
r2 = np.expand_dims(r2, -1) + x**2
r = np.sqrt(r2).astype(int)
r_counts = np.bincount(r.flatten(), minlength=max_r)
r_counts = r_counts[:max_r]
return r, r_counts
if __name__ == "__main__":
import argparse
import matplotlib.image
parser = argparse.ArgumentParser()
parser.add_argument(
'image', action='store', type=str,
help='image file', metavar='FILE',
)
args = parser.parse_args()
image = matplotlib.image.imread(args.image)
if image.ndim == 2:
image = np.expand_dims(image, 2)
assert image.ndim == 3, f'failed to load 2D image from {args.image}'
if image.shape[-1] == 1:
image = np.tile(image, (1,1,3))
elif image.shape[-1] == 4:
image = image[:,:,:3]
assert image.shape[-1] == 3, f'failed to load RGB image from {args.image}'
image = np.transpose(image, (2,0,1))
reader = lbann.reader_pb2.DataReader()
def add_data_reader(role):
_reader = reader.reader.add()
_reader.name = 'synthetic'
_reader.role = role
_reader.num_samples = 1
_reader.num_labels = 1
_reader.synth_dimensions = '1'
_reader.percent_of_data_to_use = 1.0
add_data_reader('train')
add_data_reader('test')
input_ = lbann.Input()
x = lbann.WeightsLayer(
weights=lbann.Weights(
lbann.ValueInitializer(values=image.flatten()),
),
dims=image.shape,
)
max_r = image.shape[-1] // 2
rprof = RadialProfile()(x, image.shape, max_r)
rprof_slice = lbann.Slice(rprof, slice_points=[0,1,2,3])
red = lbann.Identity(rprof_slice, name='red')
green = lbann.Identity(rprof_slice, name='green')
blue = lbann.Identity(rprof_slice, name='blue')
callbacks = [
lbann.CallbackDumpOutputs(layers=['red', 'green', 'blue']),
]
model = lbann.Model(
epochs=0,
layers=lbann.traverse_layer_graph([input_, rprof]),
callbacks=callbacks,
)
lbann.run(
trainer=lbann.Trainer(mini_batch_size=1),
model=model,
data_reader=reader,
optimizer=lbann.NoOptimizer(),
job_name='lbann_radial_profile_test',
)
| true | true |
f7fddd8de9efccb054a1b2894bf3c394477db6d0 | 3,709 | py | Python | baselines/a2c/utils.py | MoritzTaylor/baselines-tf2 | f51e40707b3c3021ae6309788d0cc0f29832dbea | [
"MIT"
] | 1 | 2020-02-28T06:41:52.000Z | 2020-02-28T06:41:52.000Z | baselines/a2c/utils.py | MoritzTaylor/baselines-tf2 | f51e40707b3c3021ae6309788d0cc0f29832dbea | [
"MIT"
] | null | null | null | baselines/a2c/utils.py | MoritzTaylor/baselines-tf2 | f51e40707b3c3021ae6309788d0cc0f29832dbea | [
"MIT"
] | null | null | null | import os
import numpy as np
import tensorflow as tf
from collections import deque
def ortho_init(scale=1.0):
    """Return an orthogonal weight initializer (Lasagne-style) scaled by *scale*.

    The returned callable matches the TF initializer signature
    (shape, dtype, partition_info) and supports 2-D dense kernels and
    4-D conv kernels (assumed NHWC); any other rank raises
    NotImplementedError. The result is float32.
    """
    def _ortho_init(shape, dtype, partition_info=None):
        shape = tuple(shape)
        if len(shape) == 2:
            flat_shape = shape
        elif len(shape) == 4:
            # Conv kernel (NHWC): collapse everything but the output
            # channels so the SVD runs on a 2-D matrix.
            flat_shape = (np.prod(shape[:-1]), shape[-1])
        else:
            raise NotImplementedError
        gaussian = np.random.normal(0.0, 1.0, flat_shape)
        u, _, v = np.linalg.svd(gaussian, full_matrices=False)
        # Thin SVD: exactly one of u/v has the flattened shape we need.
        ortho = u if u.shape == flat_shape else v
        ortho = ortho.reshape(shape)
        return (scale * ortho[:shape[0], :shape[1]]).astype(np.float32)
    return _ortho_init
def conv(scope, *, nf, rf, stride, activation, pad='valid', init_scale=1.0, data_format='channels_last'):
    """Create a Keras Conv2D layer with orthogonal kernel init under *scope*.

    nf: number of filters; rf: receptive-field (kernel) size;
    stride: convolution stride; init_scale: gain for ortho_init.
    NOTE(review): *activation* is accepted but never forwarded to the
    layer — callers must apply the activation themselves; confirm this
    is intentional.
    """
    with tf.name_scope(scope):
        layer = tf.keras.layers.Conv2D(filters=nf, kernel_size=rf, strides=stride, padding=pad,
                                       data_format=data_format, kernel_initializer=ortho_init(init_scale))
        return layer
def fc(input_shape, scope, nh, *, init_scale=1.0, init_bias=0.0):
    """Create a fully-connected (Dense) layer with orthogonal kernel init.

    The layer is built immediately against *input_shape* so its weights
    exist before the first call. nh: number of output units.
    """
    with tf.name_scope(scope):
        layer = tf.keras.layers.Dense(units=nh, kernel_initializer=ortho_init(init_scale),
                                      bias_initializer=tf.keras.initializers.Constant(init_bias))
        layer.build(input_shape)
        return layer
def discount_with_dones(rewards, dones, gamma):
    """Compute discounted returns over a trajectory, resetting at episode ends.

    Walks the rewards backwards, accumulating r_t = reward_t + gamma * r_{t+1},
    and zeroes the carried return wherever *dones* marks an episode
    boundary (done == 1). Returns the per-step discounted returns in
    the original (forward) order.
    """
    running = 0
    discounted_reversed = []
    for reward, done in zip(rewards[::-1], dones[::-1]):
        # A terminal step (done == 1) cuts the bootstrap from later steps.
        running = reward + gamma * running * (1. - done)
        discounted_reversed.append(running)
    discounted_reversed.reverse()
    return discounted_reversed
class InverseLinearTimeDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
    """Learning-rate schedule that decays linearly to zero over *nupdates*.

    lr(step) = initial_learning_rate * (1 - step / nupdates).
    Asserts that *step* stays strictly below *nupdates*, since going
    past the horizon would produce a non-positive learning rate.
    """
    def __init__(self, initial_learning_rate, nupdates, name="InverseLinearTimeDecay"):
        super(InverseLinearTimeDecay, self).__init__()
        self.initial_learning_rate = initial_learning_rate  # lr at step 0
        self.nupdates = nupdates  # total updates over which lr reaches 0
        self.name = name
    def __call__(self, step):
        """Return the learning rate for the given *step* as a tensor."""
        with tf.name_scope(self.name):
            initial_learning_rate = tf.convert_to_tensor(self.initial_learning_rate, name="initial_learning_rate")
            dtype = initial_learning_rate.dtype
            step_t = tf.cast(step, dtype)
            nupdates_t = tf.convert_to_tensor(self.nupdates, dtype=dtype)
            # Fail fast if training runs longer than the schedule allows.
            tf.assert_less(step_t, nupdates_t)
            return initial_learning_rate * (1. - step_t / nupdates_t)
    def get_config(self):
        # Keras serialization support for schedule (de)serialization.
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "nupdates": self.nupdates,
            "name": self.name
        }
class LinearTimeDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
    """Schedule that scales the base learning rate linearly with the step.

    lr(step) = initial_learning_rate * step, so the rate *grows*
    without bound as training proceeds.
    NOTE(review): unbounded growth looks suspicious for a "decay"
    schedule — the inline TODO suggests *step* was meant to be a
    fraction of total steps; confirm intent before reuse.
    """
    def __init__(self, initial_learning_rate, name="LinearTimeDecay"):
        super(LinearTimeDecay, self).__init__()
        self.initial_learning_rate = initial_learning_rate  # base lr multiplier
        self.name = name
    def __call__(self, step):
        """Return the learning rate for the given *step* as a tensor."""
        with tf.name_scope(self.name):
            initial_learning_rate = tf.convert_to_tensor(self.initial_learning_rate, name="initial_learning_rate")
            dtype = initial_learning_rate.dtype
            step_t = tf.cast(step, dtype)  # TODO: step_t = step/n_total_steps ?
            return initial_learning_rate * step_t
    def get_config(self):
        # Keras serialization support for schedule (de)serialization.
        return {
            "initial_learning_rate": self.initial_learning_rate,
            "name": self.name
        }
| 41.674157 | 114 | 0.649771 | import os
import numpy as np
import tensorflow as tf
from collections import deque
def ortho_init(scale=1.0):
def _ortho_init(shape, dtype, partition_info=None):
shape = tuple(shape)
if len(shape) == 2:
flat_shape = shape
elif len(shape) == 4:
flat_shape = (np.prod(shape[:-1]), shape[-1])
else:
raise NotImplementedError
a = np.random.normal(0.0, 1.0, flat_shape)
u, _, v = np.linalg.svd(a, full_matrices=False)
q = u if u.shape == flat_shape else v
q = q.reshape(shape)
return (scale * q[:shape[0], :shape[1]]).astype(np.float32)
return _ortho_init
def conv(scope, *, nf, rf, stride, activation, pad='valid', init_scale=1.0, data_format='channels_last'):
with tf.name_scope(scope):
layer = tf.keras.layers.Conv2D(filters=nf, kernel_size=rf, strides=stride, padding=pad,
data_format=data_format, kernel_initializer=ortho_init(init_scale))
return layer
def fc(input_shape, scope, nh, *, init_scale=1.0, init_bias=0.0):
with tf.name_scope(scope):
layer = tf.keras.layers.Dense(units=nh, kernel_initializer=ortho_init(init_scale),
bias_initializer=tf.keras.initializers.Constant(init_bias))
layer.build(input_shape)
return layer
def discount_with_dones(rewards, dones, gamma):
discounted = []
r = 0
for reward, done in zip(rewards[::-1], dones[::-1]):
r = reward + gamma*r*(1.-done)
discounted.append(r)
return discounted[::-1]
class InverseLinearTimeDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
def __init__(self, initial_learning_rate, nupdates, name="InverseLinearTimeDecay"):
super(InverseLinearTimeDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.nupdates = nupdates
self.name = name
def __call__(self, step):
with tf.name_scope(self.name):
initial_learning_rate = tf.convert_to_tensor(self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
step_t = tf.cast(step, dtype)
nupdates_t = tf.convert_to_tensor(self.nupdates, dtype=dtype)
tf.assert_less(step_t, nupdates_t)
return initial_learning_rate * (1. - step_t / nupdates_t)
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"nupdates": self.nupdates,
"name": self.name
}
class LinearTimeDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
def __init__(self, initial_learning_rate, name="LinearTimeDecay"):
super(LinearTimeDecay, self).__init__()
self.initial_learning_rate = initial_learning_rate
self.name = name
def __call__(self, step):
with tf.name_scope(self.name):
initial_learning_rate = tf.convert_to_tensor(self.initial_learning_rate, name="initial_learning_rate")
dtype = initial_learning_rate.dtype
step_t = tf.cast(step, dtype)
return initial_learning_rate * step_t
def get_config(self):
return {
"initial_learning_rate": self.initial_learning_rate,
"name": self.name
}
| true | true |
f7fde31fd4590e09f06e1fb72c2e458099119596 | 7,192 | py | Python | datahub/company/test/admin/test_update_from_dnb.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | null | null | null | datahub/company/test/admin/test_update_from_dnb.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 4 | 2021-06-30T10:34:50.000Z | 2021-06-30T10:34:51.000Z | datahub/company/test/admin/test_update_from_dnb.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | null | null | null | from urllib.parse import urljoin
import pytest
from django.conf import settings
from django.contrib.admin.templatetags.admin_urls import admin_urlname
from django.contrib.messages import get_messages
from django.urls import reverse
from rest_framework import status
from reversion.models import Version
from datahub.company.models import Company, CompanyPermission
from datahub.company.test.factories import CompanyFactory
from datahub.core.test_utils import AdminTestMixin, create_test_user
DNB_SEARCH_URL = urljoin(f'{settings.DNB_SERVICE_BASE_URL}/', 'companies/search/')
class TestUpdateFromDNB(AdminTestMixin):
    """
    Tests for the Django admin 'Update from DNB' company view
    (both GET link visibility and POST update behaviour).
    """
    def _create_company(self, **kwargs):
        # Helper: create a company (stored on self) and return the admin
        # change-page URL plus the 'update-from-dnb' action URL for it.
        self.company = CompanyFactory(**kwargs)
        change_url = reverse(
            admin_urlname(Company._meta, 'change'),
            args=(self.company.pk, ),
        )
        update_url = reverse(
            admin_urlname(Company._meta, 'update-from-dnb'),
            args=(self.company.pk, ),
        )
        return (change_url, update_url)
    def test_get(self, requests_mock, dnb_response):
        """
        Test that the link exists for a company with duns_number
        and a user with the change company permission.
        """
        change_url, update_url = self._create_company(duns_number='123456789')
        # Mock the dnb-service search endpoint with a valid response.
        requests_mock.post(
            DNB_SEARCH_URL,
            json=dnb_response,
        )
        response = self.client.get(change_url)
        assert update_url in response.rendered_content
        response = self.client.get(update_url)
        assert response.status_code == status.HTTP_200_OK
    def test_get_view_permission_only(self):
        """
        Test that the link does not exist for a company with duns_number
        but a user with only the view company permission.
        """
        change_url, update_url = self._create_company(duns_number='123456789')
        user = create_test_user(
            permission_codenames=(CompanyPermission.view_company,),
            is_staff=True,
            password=self.PASSWORD,
        )
        client = self.create_client(user=user)
        response = client.get(change_url)
        assert update_url not in response.rendered_content
        # Direct access to the action URL is forbidden without change perms.
        response = client.get(update_url)
        assert response.status_code == status.HTTP_403_FORBIDDEN
    def test_get_no_duns_number(self):
        """
        Test that the link does not exist when the company does not
        have a duns_number.
        """
        change_url, update_url = self._create_company()
        response = self.client.get(change_url)
        assert update_url not in response.rendered_content
        response = self.client.get(update_url)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
    def test_post(self, requests_mock, dnb_response):
        """
        Test that a post request to 'update-from-dnb' updates
        the company fields from the D&B record and clears the
        pending investigation flag.
        """
        _, update_url = self._create_company(
            duns_number='123456789',
            pending_dnb_investigation=True,
        )
        requests_mock.post(
            DNB_SEARCH_URL,
            json=dnb_response,
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        # Reload from the DB to observe the fields written by the view.
        self.company.refresh_from_db()
        dnb_company = dnb_response['results'][0]
        assert self.company.name == dnb_company['primary_name']
        assert self.company.address_1 == dnb_company['address_line_1']
        assert self.company.address_2 == dnb_company['address_line_2']
        assert self.company.address_town == dnb_company['address_town']
        assert self.company.address_county == dnb_company['address_county']
        assert self.company.address_country.iso_alpha2_code == dnb_company['address_country']
        assert not self.company.pending_dnb_investigation
        assert (
            self.company.global_ultimate_duns_number
            == dnb_company['global_ultimate_duns_number']
        )
        # The update should be recorded as a single reversion revision.
        versions = list(Version.objects.get_for_object(self.company))
        assert len(versions) == 1
        assert versions[0].revision.comment == 'Updated from D&B'
    @pytest.mark.parametrize(
        'dnb_response_code',
        (
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        ),
    )
    def test_post_dnb_error(self, requests_mock, dnb_response_code):
        """
        Tests that the users get an error message if the dnb-service
        doesn't return with a 200 status code.
        """
        _, update_url = self._create_company(duns_number='123456789')
        requests_mock.post(
            DNB_SEARCH_URL,
            status_code=dnb_response_code,
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        # The failure surfaces as a single Django messages-framework error.
        messages = list(get_messages(response.wsgi_request))
        assert len(messages) == 1
        assert str(messages[0]) == 'Something went wrong in an upstream service.'
    @pytest.mark.parametrize(
        'search_results, expected_message',
        (
            (
                [],
                'No matching company found in D&B database.',
            ),
            (
                ['foo', 'bar'],
                'Something went wrong in an upstream service.',
            ),
            (
                [{'duns_number': '012345678'}],
                'Something went wrong in an upstream service.',
            ),
        ),
    )
    def test_post_dnb_response_invalid(
        self,
        requests_mock,
        search_results,
        expected_message,
    ):
        """
        Test if we get anything other than a single matching company from
        dnb-service (no results, multiple results, or a mismatched duns
        number), we return an error message to the user.
        """
        _, update_url = self._create_company(duns_number='123456789')
        requests_mock.post(
            DNB_SEARCH_URL,
            json={'results': search_results},
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        messages = list(get_messages(response.wsgi_request))
        assert len(messages) == 1
        assert str(messages[0]) == expected_message
    def test_post_dnb_data_invalid(
        self,
        requests_mock,
        dnb_response,
    ):
        """
        Tests that if the data returned from DNB does not
        clear DataHub validation, we show an appropriate
        message to our users.
        """
        _, update_url = self._create_company(duns_number='123456789')
        # A null primary_name should fail Data Hub's serializer validation.
        dnb_response['results'][0]['primary_name'] = None
        requests_mock.post(
            DNB_SEARCH_URL,
            json=dnb_response,
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        messages = list(get_messages(response.wsgi_request))
        assert len(messages) == 1
        assert str(messages[0]) == 'Data from D&B did not pass the Data Hub validation checks.'
| 35.60396 | 95 | 0.635984 | from urllib.parse import urljoin
import pytest
from django.conf import settings
from django.contrib.admin.templatetags.admin_urls import admin_urlname
from django.contrib.messages import get_messages
from django.urls import reverse
from rest_framework import status
from reversion.models import Version
from datahub.company.models import Company, CompanyPermission
from datahub.company.test.factories import CompanyFactory
from datahub.core.test_utils import AdminTestMixin, create_test_user
DNB_SEARCH_URL = urljoin(f'{settings.DNB_SERVICE_BASE_URL}/', 'companies/search/')
class TestUpdateFromDNB(AdminTestMixin):
    """
    Tests for the company admin's "update from DNB" view.

    Each test creates a Company via ``_create_company`` and exercises the
    admin change page and the ``update-from-dnb`` endpoint, mocking the
    external D&B search API with ``requests_mock``.
    """

    def _create_company(self, **kwargs):
        """
        Create a company (stored on ``self.company``) and return a tuple of
        its admin change URL and its update-from-dnb URL.
        """
        self.company = CompanyFactory(**kwargs)
        change_url = reverse(
            admin_urlname(Company._meta, 'change'),
            args=(self.company.pk, ),
        )
        update_url = reverse(
            admin_urlname(Company._meta, 'update-from-dnb'),
            args=(self.company.pk, ),
        )
        return (change_url, update_url)

    def test_get(self, requests_mock, dnb_response):
        """
        The change page links to the update view, and a GET of the update
        view succeeds when the company has a duns_number.
        """
        change_url, update_url = self._create_company(duns_number='123456789')
        requests_mock.post(
            DNB_SEARCH_URL,
            json=dnb_response,
        )
        response = self.client.get(change_url)
        assert update_url in response.rendered_content
        response = self.client.get(update_url)
        assert response.status_code == status.HTTP_200_OK

    def test_get_view_permission_only(self):
        """
        A user with only view_company permission neither sees the link nor
        may access the update view (403).
        """
        change_url, update_url = self._create_company(duns_number='123456789')
        user = create_test_user(
            permission_codenames=(CompanyPermission.view_company,),
            is_staff=True,
            password=self.PASSWORD,
        )
        client = self.create_client(user=user)
        response = client.get(change_url)
        assert update_url not in response.rendered_content
        response = client.get(update_url)
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_get_no_duns_number(self):
        """
        Without a duns_number the link is hidden and the update view
        returns 400.
        """
        change_url, update_url = self._create_company()
        response = self.client.get(change_url)
        assert update_url not in response.rendered_content
        response = self.client.get(update_url)
        assert response.status_code == status.HTTP_400_BAD_REQUEST

    def test_post(self, requests_mock, dnb_response):
        """
        A successful POST copies the D&B fields onto the company, clears
        pending_dnb_investigation and records a single reversion revision.
        """
        _, update_url = self._create_company(
            duns_number='123456789',
            pending_dnb_investigation=True,
        )
        requests_mock.post(
            DNB_SEARCH_URL,
            json=dnb_response,
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        self.company.refresh_from_db()
        dnb_company = dnb_response['results'][0]
        assert self.company.name == dnb_company['primary_name']
        assert self.company.address_1 == dnb_company['address_line_1']
        assert self.company.address_2 == dnb_company['address_line_2']
        assert self.company.address_town == dnb_company['address_town']
        assert self.company.address_county == dnb_company['address_county']
        assert self.company.address_country.iso_alpha2_code == dnb_company['address_country']
        assert not self.company.pending_dnb_investigation
        assert (
            self.company.global_ultimate_duns_number
            == dnb_company['global_ultimate_duns_number']
        )
        versions = list(Version.objects.get_for_object(self.company))
        assert len(versions) == 1
        assert versions[0].revision.comment == 'Updated from D&B'

    @pytest.mark.parametrize(
        'dnb_response_code',
        (
            status.HTTP_400_BAD_REQUEST,
            status.HTTP_401_UNAUTHORIZED,
            status.HTTP_403_FORBIDDEN,
            status.HTTP_404_NOT_FOUND,
            status.HTTP_500_INTERNAL_SERVER_ERROR,
        ),
    )
    def test_post_dnb_error(self, requests_mock, dnb_response_code):
        """Any D&B error status surfaces a single generic failure message."""
        _, update_url = self._create_company(duns_number='123456789')
        requests_mock.post(
            DNB_SEARCH_URL,
            status_code=dnb_response_code,
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        messages = list(get_messages(response.wsgi_request))
        assert len(messages) == 1
        assert str(messages[0]) == 'Something went wrong in an upstream service.'

    @pytest.mark.parametrize(
        'search_results, expected_message',
        (
            # No results: treated as "not found".
            (
                [],
                'No matching company found in D&B database.',
            ),
            # More than one result: ambiguous, treated as upstream failure.
            (
                ['foo', 'bar'],
                'Something went wrong in an upstream service.',
            ),
            # One result, but for a different duns_number: upstream failure.
            (
                [{'duns_number': '012345678'}],
                'Something went wrong in an upstream service.',
            ),
        ),
    )
    def test_post_dnb_response_invalid(
        self,
        requests_mock,
        search_results,
        expected_message,
    ):
        """Unusable D&B search results produce the appropriate user message."""
        _, update_url = self._create_company(duns_number='123456789')
        requests_mock.post(
            DNB_SEARCH_URL,
            json={'results': search_results},
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        messages = list(get_messages(response.wsgi_request))
        assert len(messages) == 1
        assert str(messages[0]) == expected_message

    def test_post_dnb_data_invalid(
        self,
        requests_mock,
        dnb_response,
    ):
        """D&B data failing Data Hub validation is rejected with a message."""
        _, update_url = self._create_company(duns_number='123456789')
        # A null primary_name should fail the serializer validation.
        dnb_response['results'][0]['primary_name'] = None
        requests_mock.post(
            DNB_SEARCH_URL,
            json=dnb_response,
        )
        response = self.client.post(update_url)
        assert response.status_code == status.HTTP_302_FOUND
        messages = list(get_messages(response.wsgi_request))
        assert len(messages) == 1
        assert str(messages[0]) == 'Data from D&B did not pass the Data Hub validation checks.'
| true | true |
f7fde3d9cf40f13051a639dd72146d2dd64eb354 | 220 | py | Python | setup.py | CamCairns/house_price_model | c7abb3449fef67cfe673bccb2abc627b1cab9dcb | [
"MIT"
] | null | null | null | setup.py | CamCairns/house_price_model | c7abb3449fef67cfe673bccb2abc627b1cab9dcb | [
"MIT"
] | 1 | 2019-08-23T23:59:05.000Z | 2019-08-23T23:59:05.000Z | setup.py | CamCairns/house_price_model | c7abb3449fef67cfe673bccb2abc627b1cab9dcb | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
# Packaging metadata for the project; exposes every package found by
# setuptools' auto-discovery (expected to include `src`).
setup(
    name='src',
    version='0.1.0',
    author='Cam Cairns',
    license='MIT',
    description='A short description of the project.',
    packages=find_packages(),
)
| 20 | 54 | 0.663636 | from setuptools import find_packages, setup
# Minimal setuptools configuration: `find_packages()` auto-discovers all
# importable packages under the project root (expected to include `src`).
setup(
    name='src',
    packages=find_packages(),
    version='0.1.0',
    description='A short description of the project.',
    author='Cam Cairns',
    license='MIT',
)
| true | true |
f7fde3f6d022b1b6ff5dc2244c2d042249f10b02 | 45,995 | py | Python | cms/tests/test_placeholder.py | evildmp/django-cms | 15f47a7acb22a112c343dca72597f4b8e6c0d593 | [
"BSD-3-Clause"
] | 1 | 2020-04-09T19:53:43.000Z | 2020-04-09T19:53:43.000Z | cms/tests/test_placeholder.py | maykinmedia/django-cms-sea | 16974c6ebf8fcb190ee42f1267a7ec4ee14a1b4c | [
"BSD-3-Clause"
] | 1 | 2017-01-04T16:58:49.000Z | 2017-01-04T16:58:49.000Z | cms/tests/test_placeholder.py | evildmp/django-cms | 15f47a7acb22a112c343dca72597f4b8e6c0d593 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.template import TemplateSyntaxError, Template
from django.template.loader import get_template
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.encoding import force_text
from django.utils.numberformat import format
from djangocms_link.cms_plugins import LinkPlugin
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from sekizai.context import SekizaiContext
from cms import constants
from cms.api import add_plugin, create_page, create_title
from cms.exceptions import DuplicatePlaceholderWarning
from cms.models.fields import PlaceholderField
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_pool import plugin_pool
from cms.tests.test_toolbar import ToolbarTestBase
from cms.test_utils.fixtures.fakemlng import FakemlngFixtures
from cms.test_utils.project.fakemlng.models import Translations
from cms.test_utils.project.placeholderapp.models import (
DynamicPlaceholderSlotExample,
Example1,
MultilingualExample1,
TwoPlaceholderExample,
)
from cms.test_utils.project.sampleapp.models import Category
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.mock import AttributeObject
from cms.toolbar.toolbar import CMSToolbar
from cms.toolbar.utils import get_toolbar_from_request
from cms.utils.compat import DJANGO_1_8
from cms.utils.compat.tests import UnittestCompatMixin
from cms.utils.conf import get_cms_setting
from cms.utils.placeholder import (PlaceholderNoAction, MLNGPlaceholderActions,
get_placeholder_conf, get_placeholders, _get_nodelist,
_scan_placeholders)
from cms.utils.plugins import assign_plugins
from cms.utils.urlutils import admin_reverse
def _render_placeholder(placeholder, context, **kwargs):
    """
    Render ``placeholder`` through the content renderer of the toolbar
    attached to ``context['request']``.

    The renderer is also exposed to templates as
    ``context['cms_content_renderer']`` before rendering, mirroring what the
    ``render_placeholder`` template tag does. Extra keyword arguments are
    forwarded to ``render_placeholder``.
    """
    renderer = get_toolbar_from_request(context['request']).content_renderer
    context['cms_content_renderer'] = renderer
    return renderer.render_placeholder(placeholder, context, **kwargs)
class PlaceholderTestCase(CMSTestCase, UnittestCompatMixin):
    """
    Tests for django CMS placeholders: template scanning for
    ``{% placeholder %}`` tags, placeholder rendering (including ghost
    plugins and language fallback), ``CMS_PLACEHOLDER_CONF`` handling,
    ``PlaceholderField`` behaviour and template-cache interactions.
    """

    def setUp(self):
        # Run every test as a logged-in staff superuser.
        u = self._create_user("test", True, True)
        self._login_context = self.login_user_context(u)
        self._login_context.__enter__()

    def tearDown(self):
        self._login_context.__exit__(None, None, None)

    def test_placeholder_scanning_extend(self):
        """Placeholders are collected through {% extends %}."""
        placeholders = get_placeholders('placeholder_tests/test_one.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'three']))

    def test_placeholder_scanning_sekizai_extend(self):
        placeholders = get_placeholders('placeholder_tests/test_one_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'three']))

    def test_placeholder_scanning_include(self):
        """Placeholders are collected through {% include %}."""
        placeholders = get_placeholders('placeholder_tests/test_two.html')
        self.assertEqual(sorted(placeholders), sorted([u'child', u'three']))

    def test_placeholder_scanning_double_extend(self):
        placeholders = get_placeholders('placeholder_tests/test_three.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'new_three']))

    def test_placeholder_scanning_sekizai_double_extend(self):
        placeholders = get_placeholders('placeholder_tests/test_three_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'new_three']))

    def test_placeholder_scanning_complex(self):
        placeholders = get_placeholders('placeholder_tests/test_four.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'child', u'four']))

    def test_placeholder_scanning_super(self):
        """Placeholders inside {{ block.super }} blocks are found."""
        placeholders = get_placeholders('placeholder_tests/test_five.html')
        self.assertEqual(sorted(placeholders), sorted([u'one', u'extra_one', u'two', u'three']))

    def test_placeholder_scanning_nested(self):
        placeholders = get_placeholders('placeholder_tests/test_six.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'new_two', u'new_three']))

    def test_placeholder_scanning_duplicate(self):
        """A duplicated slot triggers DuplicatePlaceholderWarning but is kept once."""
        placeholders = self.assertWarns(DuplicatePlaceholderWarning,
                                        'Duplicate {% placeholder "one" %} in template placeholder_tests/test_seven.html.',
                                        get_placeholders, 'placeholder_tests/test_seven.html')
        self.assertEqual(sorted(placeholders), sorted([u'one']))

    def test_placeholder_scanning_extend_outside_block(self):
        placeholders = get_placeholders('placeholder_tests/outside.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))

    def test_placeholder_scanning_sekizai_extend_outside_block(self):
        placeholders = get_placeholders('placeholder_tests/outside_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))

    def test_placeholder_scanning_extend_outside_block_nested(self):
        placeholders = get_placeholders('placeholder_tests/outside_nested.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))

    def test_placeholder_scanning_sekizai_extend_outside_block_nested(self):
        placeholders = get_placeholders('placeholder_tests/outside_nested_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))

    def test_placeholder_scanning_var(self):
        """_scan_placeholders works on Template nodelists built at runtime."""
        t = Template('{%load cms_tags %}{% include name %}{% placeholder "a_placeholder" %}')
        phs = _scan_placeholders(t.nodelist)
        self.assertListEqual(sorted(phs), sorted([u'a_placeholder']))

        t = Template('{% include "placeholder_tests/outside_nested_sekizai.html" %}')
        phs = _scan_placeholders(t.nodelist)
        self.assertListEqual(sorted(phs), sorted([u'two', u'new_one', u'base_outside']))

    def test_fieldsets_requests(self):
        """Admin add views for models with PlaceholderFields render fine."""
        response = self.client.get(admin_reverse('placeholderapp_example1_add'))
        self.assertEqual(response.status_code, 200)
        response = self.client.get(admin_reverse('placeholderapp_twoplaceholderexample_add'))
        self.assertEqual(response.status_code, 200)

    def test_page_only_plugins(self):
        """Page-only plugins are not offered on non-page placeholders."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        response = self.client.get(admin_reverse('placeholderapp_example1_change', args=(ex.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'InheritPagePlaceholderPlugin')

    def test_inter_placeholder_plugin_move(self):
        """Moving a plugin between two placeholders reorders both correctly."""
        ex = TwoPlaceholderExample(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ph1 = ex.placeholder_1
        ph2 = ex.placeholder_2
        ph1_pl1 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin1').cmsplugin_ptr
        ph1_pl2 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin2').cmsplugin_ptr
        ph1_pl3 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin3').cmsplugin_ptr
        ph2_pl1 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin1').cmsplugin_ptr
        ph2_pl2 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin2').cmsplugin_ptr
        ph2_pl3 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin3').cmsplugin_ptr
        # Move ph1's second plugin to the end of ph2.
        data = {
            'placeholder_id': str(ph2.pk),
            'plugin_id': str(ph1_pl2.pk),
            'plugin_order[]': [str(p.pk) for p in [ph2_pl3, ph2_pl1, ph2_pl2, ph1_pl2]]
        }
        endpoint = self.get_move_plugin_uri(ph1_pl2, container=TwoPlaceholderExample)
        response = self.client.post(endpoint, data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual([ph1_pl1, ph1_pl3], list(ph1.cmsplugin_set.order_by('position')))
        self.assertEqual([ph2_pl3, ph2_pl1, ph2_pl2, ph1_pl2, ], list(ph2.cmsplugin_set.order_by('position')))

    def test_placeholder_render_ghost_plugin(self):
        """
        Tests a placeholder won't render a ghost plugin.
        """
        page_en = create_page('page_en', 'col_two.html', 'en')
        placeholder_en = page_en.placeholders.get(slot='col_left')

        # A "ghost" plugin: a bare CMSPlugin row with no plugin instance.
        CMSPlugin.objects.create(
            language='en',
            plugin_type='LinkPlugin',
            position=1,
            placeholder=placeholder_en,
            parent=None,
        )

        add_plugin(
            placeholder_en,
            "LinkPlugin",
            "en",
            name='name',
            url='http://example.com/',
        )

        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)

        content_en = _render_placeholder(placeholder_en, context_en)

        self.assertEqual(content_en.strip(), '<a href="http://example.com/" >name</a>')

    def test_placeholder_render_ghost_plugin_with_child(self):
        """
        Tests a placeholder won't render a ghost plugin or any of it's children.
        """
        page_en = create_page('page_en', 'col_two.html', 'en')
        placeholder_en = page_en.placeholders.get(slot='col_left')

        # Ghost parent plugin (no instance) with a real child attached.
        plugin = CMSPlugin.objects.create(
            language='en',
            plugin_type='LinkPlugin',
            position=1,
            placeholder=placeholder_en,
            parent=None,
        )

        add_plugin(
            placeholder_en,
            "LinkPlugin",
            "en",
            target=plugin,
            name='invalid',
            url='http://example.com/',
        )

        add_plugin(
            placeholder_en,
            "LinkPlugin",
            "en",
            name='valid',
            url='http://example.com/',
        )

        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)

        content_en = _render_placeholder(placeholder_en, context_en)

        self.assertEqual(content_en.strip(), '<a href="http://example.com/" >valid</a>')

    @override_settings(CMS_PERMISSION=False)
    def test_nested_plugin_escapejs(self):
        """
        Checks #1366 error condition.
        When adding/editing a plugin whose icon_src() method returns a URL
        containing an hyphen, the hyphen is escaped by django escapejs resulting
        in a incorrect URL
        """
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ph1 = ex.placeholder
        ###
        # add the test plugin
        ###
        test_plugin = add_plugin(ph1, u"EmptyPlugin", u"en")
        test_plugin.save()
        endpoint = self.get_change_plugin_uri(test_plugin, container=Example1)
        response = self.client.post(endpoint, {})
        self.assertContains(response, "CMS.API.Helpers.onPluginSave")

    @override_settings(CMS_PERMISSION=False)
    def test_nested_plugin_escapejs_page(self):
        """
        Sibling test of the above, on a page.
        #1366 does not apply to placeholder defined in a page
        """
        page = create_page('page', 'col_two.html', 'en')
        ph1 = page.placeholders.get(slot='col_left')
        ###
        # add the test plugin
        ###
        test_plugin = add_plugin(ph1, u"EmptyPlugin", u"en")
        test_plugin.save()
        endpoint = self.get_change_plugin_uri(test_plugin)
        response = self.client.post(endpoint, {})
        self.assertContains(response, "CMS.API.Helpers.onPluginSave")

    def test_placeholder_scanning_fail(self):
        """An invalid template raises TemplateSyntaxError during scanning."""
        self.assertRaises(TemplateSyntaxError, get_placeholders, 'placeholder_tests/test_eleven.html')

    def test_placeholder_tag(self):
        """{% render_placeholder %} renders nothing without a placeholder/plugins."""
        request = self.get_request('/', language=settings.LANGUAGES[0][0])

        template = "{% load cms_tags %}{% render_placeholder placeholder %}"
        output = self.render_template_obj(template, {}, request)
        self.assertEqual(output, "")

        placeholder = Placeholder.objects.create(slot="test")
        output = self.render_template_obj(template, {'placeholder': placeholder}, request)
        self.assertEqual(output, "")
        self.assertEqual(placeholder.get_plugins().count(), 0)

        add_plugin(placeholder, "TextPlugin", settings.LANGUAGES[0][0], body="test")
        self.assertEqual(placeholder.get_plugins().count(), 1)
        placeholder = self.reload(placeholder)
        output = self.render_template_obj(template, {'placeholder': placeholder}, request)
        self.assertEqual(output, "test")

    def test_placeholder_tag_language(self):
        """{% render_placeholder ... language %} honours the language argument."""
        template = "{% load cms_tags %}{% render_placeholder placeholder language language %}"
        placeholder = Placeholder.objects.create(slot="test")
        add_plugin(placeholder, "TextPlugin", 'en', body="English")
        add_plugin(placeholder, "TextPlugin", 'de', body="Deutsch")
        request = self.get_request('/')

        output = self.render_template_obj(template, {'placeholder': placeholder, 'language': 'en'}, request)
        self.assertEqual(output.strip(), "English")

        # Invalidate the per-placeholder plugin cache before re-rendering.
        del placeholder._plugins_cache

        output = self.render_template_obj(template, {'placeholder': placeholder, 'language': 'de'}, request)
        self.assertEqual(output.strip(), "Deutsch")

    def test_get_placeholder_conf(self):
        """get_placeholder_conf resolves slot/template-specific and inherited config."""
        TEST_CONF = {
            'main': {
                'name': 'main content',
                'plugins': ['TextPlugin', 'LinkPlugin'],
                'default_plugins': [
                    {
                        'plugin_type': 'TextPlugin',
                        'values': {
                            'body': '<p>Some default text</p>'
                        },
                    },
                ],
            },
            'layout/home.html main': {
                'name': u'main content with FilerImagePlugin and limit',
                'plugins': ['TextPlugin', 'FilerImagePlugin', 'LinkPlugin'],
                'inherit': 'main',
                'limits': {'global': 1},
            },
            'layout/other.html main': {
                'name': u'main content with FilerImagePlugin and no limit',
                'inherit': 'layout/home.html main',
                'limits': {},
                'excluded_plugins': ['LinkPlugin']
            },
            None: {
                'name': u'All',
                'plugins': ['FilerImagePlugin', 'LinkPlugin'],
                'limits': {},
            },
        }

        with self.settings(CMS_PLACEHOLDER_CONF=TEST_CONF):
            # test no inheritance
            returned = get_placeholder_conf('plugins', 'main')
            self.assertEqual(returned, TEST_CONF['main']['plugins'])
            # test no inherited value with inheritance enabled
            returned = get_placeholder_conf('plugins', 'main', 'layout/home.html')
            self.assertEqual(returned, TEST_CONF['layout/home.html main']['plugins'])
            # test direct inherited value
            returned = get_placeholder_conf('plugins', 'main', 'layout/other.html')
            self.assertEqual(returned, TEST_CONF['layout/home.html main']['plugins'])
            # test excluded_plugins key
            returned = get_placeholder_conf('excluded_plugins', 'main', 'layout/other.html')
            self.assertEqual(returned, TEST_CONF['layout/other.html main']['excluded_plugins'])
            # test grandparent inherited value
            returned = get_placeholder_conf('default_plugins', 'main', 'layout/other.html')
            self.assertEqual(returned, TEST_CONF['main']['default_plugins'])
            # test generic configuration
            returned = get_placeholder_conf('plugins', 'something')
            self.assertEqual(returned, TEST_CONF[None]['plugins'])

    def test_placeholder_context_leaking(self):
        """extra_context from CMS_PLACEHOLDER_CONF must not leak into the context."""
        TEST_CONF = {'test': {'extra_context': {'extra_width': 10}}}
        ph = Placeholder.objects.create(slot='test')

        content_renderer = self.get_content_renderer()
        context = SekizaiContext()
        context['request'] = content_renderer.request

        with self.settings(CMS_PLACEHOLDER_CONF=TEST_CONF):
            _render_placeholder(ph, context)
            self.assertFalse('extra_width' in context)
            ph.render(context, None)
            self.assertFalse('extra_width' in context)

    def test_placeholder_scanning_nested_super(self):
        placeholders = get_placeholders('placeholder_tests/nested_super_level1.html')
        self.assertEqual(sorted(placeholders), sorted([u'level1', u'level2', u'level3', u'level4']))

    def test_placeholder_field_no_related_name(self):
        """related_name='+' is rejected for PlaceholderField."""
        self.assertRaises(ValueError, PlaceholderField, 'placeholder', related_name='+')

    def test_placeholder_field_db_table(self):
        """
        Test for leaking Model._meta.db_table monkeypatching on SQLite (#3891).
        """
        example = Category.objects.create(
            name='category',
            parent=None, depth=1,
        )
        self.assertEqual(example.description._get_attached_fields()[0].model, Category)
        self.assertEqual(len(example.description._get_attached_fields()), 1)

    def test_placeholder_field_valid_slotname(self):
        """A non-string slot name is rejected for PlaceholderField."""
        self.assertRaises(ImproperlyConfigured, PlaceholderField, 10)

    def test_placeholder_field_dynamic_slot_generation(self):
        instance = DynamicPlaceholderSlotExample.objects.create(char_1='slot1', char_2='slot2')
        self.assertEqual(instance.char_1, instance.placeholder_1.slot)
        self.assertEqual(instance.char_2, instance.placeholder_2.slot)

    def test_placeholder_field_dynamic_slot_update(self):
        instance = DynamicPlaceholderSlotExample.objects.create(char_1='slot1', char_2='slot2')

        # Plugin counts
        old_placeholder_1_plugin_count = len(instance.placeholder_1.get_plugins())
        old_placeholder_2_plugin_count = len(instance.placeholder_2.get_plugins())

        # Switch around the slot names
        instance.char_1, instance.char_2 = instance.char_2, instance.char_1

        # Store the ids before save, to test that a new placeholder is NOT created.
        placeholder_1_id = instance.placeholder_1.pk
        placeholder_2_id = instance.placeholder_2.pk

        # Save instance
        instance.save()

        current_placeholder_1_plugin_count = len(instance.placeholder_1.get_plugins())
        current_placeholder_2_plugin_count = len(instance.placeholder_2.get_plugins())

        # Now test that the placeholder slots have changed
        self.assertEqual(instance.char_2, 'slot1')
        self.assertEqual(instance.char_1, 'slot2')
        # Test that a new placeholder was never created
        self.assertEqual(instance.placeholder_1.pk, placeholder_1_id)
        self.assertEqual(instance.placeholder_2.pk, placeholder_2_id)
        # And test the plugin counts remain the same
        self.assertEqual(old_placeholder_1_plugin_count, current_placeholder_1_plugin_count)
        self.assertEqual(old_placeholder_2_plugin_count, current_placeholder_2_plugin_count)

    def test_plugins_language_fallback(self):
        """ Tests language_fallback placeholder configuration """
        page_en = create_page('page_en', 'col_two.html', 'en')
        title_de = create_title("de", "page_de", page_en)
        placeholder_en = page_en.placeholders.get(slot='col_left')
        placeholder_de = title_de.page.placeholders.get(slot='col_left')
        add_plugin(placeholder_en, TextPlugin, 'en', body='en body')

        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        context_de = SekizaiContext()
        context_de['request'] = self.get_request(language="de", page=page_en)

        # First test the default (fallback) behavior)
        ## English page should have the text plugin
        content_en = _render_placeholder(placeholder_en, context_en)
        self.assertRegexpMatches(content_en, "^en body$")

        ## Deutsch page have text due to fallback
        content_de = _render_placeholder(placeholder_de, context_de)
        self.assertRegexpMatches(content_de, "^en body$")
        self.assertEqual(len(content_de), 7)

        conf = {
            'col_left': {
                'language_fallback': False,
            },
        }
        # configure non fallback
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            ## Deutsch page should have no text
            del(placeholder_de._plugins_cache)
            cache.clear()
            content_de = _render_placeholder(placeholder_de, context_de)
            ## Deutsch page should inherit english content
            self.assertNotRegex(content_de, "^en body$")
            context_de2 = SekizaiContext()
            request = self.get_request(language="de", page=page_en)
            request.user = self.get_superuser()
            request.toolbar = CMSToolbar(request)
            request.toolbar.edit_mode = True
            context_de2['request'] = request
            del(placeholder_de._plugins_cache)
            cache.clear()
            content_de2 = _render_placeholder(placeholder_de, context_de2)
            self.assertFalse("en body" in content_de2)
            # remove the cached plugins instances
            del(placeholder_de._plugins_cache)
            cache.clear()
            # Then we add a plugin to check for proper rendering
            add_plugin(placeholder_de, TextPlugin, 'de', body='de body')
            content_de = _render_placeholder(placeholder_de, context_de)
            self.assertRegexpMatches(content_de, "^de body$")

    def test_nested_plugins_language_fallback(self):
        """ Tests language_fallback placeholder configuration for nested plugins"""
        page_en = create_page('page_en', 'col_two.html', 'en')
        title_de = create_title("de", "page_de", page_en)
        placeholder_en = page_en.placeholders.get(slot='col_left')
        placeholder_de = title_de.page.placeholders.get(slot='col_left')
        link_en = add_plugin(placeholder_en, LinkPlugin, 'en', name='en name', url='http://example.com/en')
        add_plugin(placeholder_en, TextPlugin, 'en', target=link_en, body='en body')

        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        context_de = SekizaiContext()
        context_de['request'] = self.get_request(language="de", page=page_en)

        conf = {
            'col_left': {
                'language_fallback': True,
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            content_de = _render_placeholder(placeholder_de, context_de)
            self.assertRegexpMatches(content_de, "<a href=\"http://example.com/en\"")
            self.assertRegexpMatches(content_de, "en body")
            context_de2 = SekizaiContext()
            request = self.get_request(language="de", page=page_en)
            request.user = self.get_superuser()
            request.toolbar = CMSToolbar(request)
            request.toolbar.edit_mode = True
            context_de2['request'] = request
            del(placeholder_de._plugins_cache)
            cache.clear()
            content_de2 = _render_placeholder(placeholder_de, context_de2)
            self.assertFalse("en body" in content_de2)
            # remove the cached plugins instances
            del(placeholder_de._plugins_cache)
            cache.clear()
            # Then we add a plugin to check for proper rendering
            link_de = add_plugin(placeholder_en, LinkPlugin, 'de', name='de name', url='http://example.com/de')
            add_plugin(placeholder_en, TextPlugin, 'de', target=link_de, body='de body')
            content_de = _render_placeholder(placeholder_de, context_de)
            self.assertRegexpMatches(content_de, "<a href=\"http://example.com/de\"")
            self.assertRegexpMatches(content_de, "de body")

    def test_plugins_non_default_language_fallback(self):
        """ Tests language_fallback placeholder configuration """
        page_en = create_page('page_en', 'col_two.html', 'en')
        create_title("de", "page_de", page_en)
        placeholder_en = page_en.placeholders.get(slot='col_left')
        placeholder_de = page_en.placeholders.get(slot='col_left')
        add_plugin(placeholder_de, TextPlugin, 'de', body='de body')

        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        context_de = SekizaiContext()
        context_de['request'] = self.get_request(language="de", page=page_en)

        # First test the default (fallback) behavior)
        ## Deutsch page should have the text plugin
        content_de = _render_placeholder(placeholder_en, context_de)
        self.assertRegexpMatches(content_de, "^de body$")
        del(placeholder_en._plugins_cache)
        cache.clear()
        ## English page should have no text
        content_en = _render_placeholder(placeholder_en, context_en)
        self.assertRegexpMatches(content_en, "^de body$")
        self.assertEqual(len(content_en), 7)
        del(placeholder_en._plugins_cache)
        cache.clear()
        conf = {
            'col_left': {
                'language_fallback': False,
            },
        }
        # configure non-fallback
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            ## English page should have deutsch text
            content_en = _render_placeholder(placeholder_en, context_en)
            self.assertNotRegex(content_en, "^de body$")

            # remove the cached plugins instances
            del(placeholder_en._plugins_cache)
            cache.clear()
            # Then we add a plugin to check for proper rendering
            add_plugin(placeholder_en, TextPlugin, 'en', body='en body')
            content_en = _render_placeholder(placeholder_en, context_en)
            self.assertRegexpMatches(content_en, "^en body$")

    def test_plugins_discarded_with_language_fallback(self):
        """
        Tests side effect of language fallback: if fallback enabled placeholder
        existed, it discards all other existing plugins
        """
        page_en = create_page('page_en', 'col_two.html', 'en')
        create_title("de", "page_de", page_en)
        placeholder_sidebar_en = page_en.placeholders.get(slot='col_sidebar')
        placeholder_en = page_en.placeholders.get(slot='col_left')
        add_plugin(placeholder_sidebar_en, TextPlugin, 'en', body='en body')

        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)

        conf = {
            'col_left': {
                'language_fallback': True,
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            # call assign plugins first, as this is what is done in real cms life
            # for all placeholders in a page at once
            assign_plugins(context_en['request'],
                           [placeholder_sidebar_en, placeholder_en], 'col_two.html')
            # if the normal, non fallback enabled placeholder still has content
            content_en = _render_placeholder(placeholder_sidebar_en, context_en)
            self.assertRegexpMatches(content_en, "^en body$")

            # remove the cached plugins instances
            del(placeholder_sidebar_en._plugins_cache)
            cache.clear()

    def test_plugins_prepopulate(self):
        """ Tests prepopulate placeholder configuration """
        conf = {
            'col_left': {
                'default_plugins' : [
                    {
                        'plugin_type':'TextPlugin',
                        'values':{'body':'<p>en default body 1</p>'},
                    },
                    {
                        'plugin_type':'TextPlugin',
                        'values':{'body':'<p>en default body 2</p>'},
                    },
                ]
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            page = create_page('page_en', 'col_two.html', 'en')
            placeholder = page.placeholders.get(slot='col_left')
            context = SekizaiContext()
            context['request'] = self.get_request(language="en", page=page)
            # Our page should have "en default body 1" AND "en default body 2"
            content = _render_placeholder(placeholder, context)
            self.assertRegexpMatches(content, "^<p>en default body 1</p>\s*<p>en default body 2</p>$")

    def test_plugins_children_prepopulate(self):
        """
        Validate a default textplugin with a nested default link plugin
        """
        conf = {
            'col_left': {
                'default_plugins': [
                    {
                        'plugin_type': 'TextPlugin',
                        'values': {
                            'body': '<p>body %(_tag_child_1)s and %(_tag_child_2)s</p>'
                        },
                        'children': [
                            {
                                'plugin_type': 'LinkPlugin',
                                'values': {
                                    'name': 'django',
                                    'url': 'https://www.djangoproject.com/'
                                },
                            },
                            {
                                'plugin_type': 'LinkPlugin',
                                'values': {
                                    'name': 'django-cms',
                                    'url': 'https://www.django-cms.org'
                                },
                            },
                        ]
                    },
                ]
            },
        }

        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            page = create_page('page_en', 'col_two.html', 'en')
            placeholder = page.placeholders.get(slot='col_left')
            context = SekizaiContext()
            context['request'] = self.get_request(language="en", page=page)
            _render_placeholder(placeholder, context)
            plugins = placeholder.get_plugins_list()
            self.assertEqual(len(plugins), 3)
            self.assertEqual(plugins[0].plugin_type, 'TextPlugin')
            self.assertEqual(plugins[1].plugin_type, 'LinkPlugin')
            self.assertEqual(plugins[2].plugin_type, 'LinkPlugin')
            self.assertTrue(plugins[1].parent == plugins[2].parent and plugins[1].parent == plugins[0])

    def test_placeholder_pk_thousands_format(self):
        """Placeholder pks in the frontend JSON must not be thousand-separated."""
        page = create_page("page", "nav_playground.html", "en", published=True)
        for placeholder in page.placeholders.all():
            page.placeholders.remove(placeholder)
            # Push the pk above 1000 so USE_THOUSAND_SEPARATOR could bite.
            placeholder.pk += 1000
            placeholder.save()
            page.placeholders.add(placeholder)
        page.reload()
        for placeholder in page.placeholders.all():
            add_plugin(placeholder, "TextPlugin", "en", body="body")
        with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True):
            # Superuser
            user = self.get_superuser()
            self.client.login(username=getattr(user, get_user_model().USERNAME_FIELD),
                              password=getattr(user, get_user_model().USERNAME_FIELD))
            response = self.client.get("/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
            for placeholder in page.placeholders.all():
                self.assertContains(
                    response, '"placeholder_id": "%s"' % placeholder.pk)
                self.assertNotContains(
                    response, '"placeholder_id": "%s"' % format(
                        placeholder.pk, ".", grouping=3, thousand_sep=","))
                self.assertNotContains(
                    response, '"plugin_id": "%s"' % format(
                        placeholder.pk, ".", grouping=3, thousand_sep=","))
                self.assertNotContains(
                    response, '"clipboard": "%s"' % format(
                        response.context['request'].toolbar.clipboard.pk, ".",
                        grouping=3, thousand_sep=","))

    def test_placeholder_languages_model(self):
        """
        Checks the retrieval of filled languages for a placeholder in a django
        model
        """
        avail_langs = set([u'en', u'de', u'fr'])
        # Setup instance
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ###
        # add the test plugin
        ###
        for lang in avail_langs:
            add_plugin(ex.placeholder, u"EmptyPlugin", lang)
        # reload instance from database
        ex = Example1.objects.get(pk=ex.pk)
        #get languages
        langs = [lang['code'] for lang in ex.placeholder.get_filled_languages()]
        self.assertEqual(avail_langs, set(langs))

    def test_placeholder_languages_page(self):
        """
        Checks the retrieval of filled languages for a placeholder in a django
        model
        """
        avail_langs = set([u'en', u'de', u'fr'])
        # Setup instances
        page = create_page('test page', 'col_two.html', u'en')
        for lang in avail_langs:
            if lang != u'en':
                create_title(lang, 'test page %s' % lang, page)
        placeholder = page.placeholders.get(slot='col_sidebar')
        ###
        # add the test plugin
        ###
        for lang in avail_langs:
            add_plugin(placeholder, u"EmptyPlugin", lang)
        # reload placeholder from database
        placeholder = page.placeholders.get(slot='col_sidebar')
        # get languages
        langs = [lang['code'] for lang in placeholder.get_filled_languages()]
        self.assertEqual(avail_langs, set(langs))

    @override_settings(TEMPLATE_LOADERS=(
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),))
    def test_cached_template_not_corrupted_by_placeholder_scan(self):
        """
        This is the test for the low-level code that caused the bug:
        the placeholder scan corrupts the nodelist of the extends node,
        which is retained by the cached template loader, and future
        renders of that template will render the super block twice.
        """

        nodelist = _get_nodelist(get_template("placeholder_tests/test_super_extends_2.html"))
        self.assertNotIn('one',
                         nodelist[0].blocks.keys(),
                         "test_super_extends_1.html contains a block called 'one', "
                         "but _2.html does not.")

        get_placeholders("placeholder_tests/test_super_extends_2.html")

        nodelist = _get_nodelist(get_template("placeholder_tests/test_super_extends_2.html"))
        self.assertNotIn('one',
                         nodelist[0].blocks.keys(),
                         "test_super_extends_1.html still should not contain a block "
                         "called 'one' after rescanning placeholders.")

    @override_settings(TEMPLATE_LOADERS=(
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),))
    def test_super_extends_not_corrupted_by_placeholder_scan(self):
        """
        This is the test for the symptom of the bug: because the block
        context now contains two copies of the inherited block, that block
        will be executed twice, and if it adds content to {{block.super}},
        that content will be added twice.
        """

        template = get_template("placeholder_tests/test_super_extends_2.html")
        output = template.render({})
        self.assertEqual(['Whee'], [o for o in output.split('\n')
                                    if 'Whee' in o])

        get_placeholders("placeholder_tests/test_super_extends_2.html")

        template = get_template("placeholder_tests/test_super_extends_2.html")
        output = template.render({})
        self.assertEqual(['Whee'], [o for o in output.split('\n')
                                    if 'Whee' in o])
class PlaceholderActionTests(FakemlngFixtures, CMSTestCase):
    """Tests for the placeholder copy-action helpers (no-op and MLNG)."""
    def test_placeholder_no_action(self):
        """PlaceholderNoAction exposes no copy languages and never copies."""
        actions = PlaceholderNoAction()
        self.assertEqual(actions.get_copy_languages(), [])
        self.assertFalse(actions.copy())
    def test_mlng_placeholder_actions_get_copy_languages(self):
        """get_copy_languages() lists the other languages that have content
        available for copying into the given placeholder."""
        actions = MLNGPlaceholderActions()
        fr = Translations.objects.get(language_code='fr')
        de = Translations.objects.get(language_code='de')
        en = Translations.objects.get(language_code='en')
        fieldname = 'placeholder'
        fr_copy_languages = actions.get_copy_languages(
            fr.placeholder, Translations, fieldname
        )
        de_copy_languages = actions.get_copy_languages(
            de.placeholder, Translations, fieldname
        )
        en_copy_languages = actions.get_copy_languages(
            en.placeholder, Translations, fieldname
        )
        EN = ('en', 'English')
        FR = ('fr', 'French')
        self.assertEqual(set(fr_copy_languages), set([EN]))
        self.assertEqual(set(de_copy_languages), set([EN, FR]))
        self.assertEqual(set(en_copy_languages), set([FR]))
    def test_mlng_placeholder_actions_copy(self):
        """copy() clones the plugins from the source-language placeholder."""
        actions = MLNGPlaceholderActions()
        fr = Translations.objects.get(language_code='fr')
        de = Translations.objects.get(language_code='de')
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
        new_plugins = actions.copy(de.placeholder, 'fr', 'placeholder', Translations, 'de')
        self.assertEqual(len(new_plugins), 1)
        de = self.reload(de)
        fr = self.reload(fr)
        # Source keeps its plugin, target gains a copy.
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 1)
    def test_mlng_placeholder_actions_empty_copy(self):
        """Copying from an empty source language creates no plugins."""
        actions = MLNGPlaceholderActions()
        fr = Translations.objects.get(language_code='fr')
        de = Translations.objects.get(language_code='de')
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
        new_plugins = actions.copy(fr.placeholder, 'de', 'placeholder', Translations, 'fr')
        self.assertEqual(len(new_plugins), 0)
        de = self.reload(de)
        fr = self.reload(fr)
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
    def test_mlng_placeholder_actions_no_placeholder(self):
        """copy() returns False when the target row has no placeholder."""
        actions = MLNGPlaceholderActions()
        Translations.objects.filter(language_code='nl').update(placeholder=None)
        de = Translations.objects.get(language_code='de')
        nl = Translations.objects.get(language_code='nl')
        self.assertEqual(nl.placeholder, None)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
        okay = actions.copy(de.placeholder, 'nl', 'placeholder', Translations, 'de')
        self.assertEqual(okay, False)
        de = self.reload(de)
        nl = self.reload(nl)
        nl = Translations.objects.get(language_code='nl')
        de = Translations.objects.get(language_code='de')
        # Bug fix: the test previously ended after the reloads without any
        # assertion, so a copy that silently succeeded would still pass.
        # Verify the post-conditions explicitly.
        self.assertEqual(nl.placeholder, None)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
@override_settings(CMS_PERMISSION=False)
class PlaceholderModelTests(ToolbarTestBase, CMSTestCase):
    """Unit tests for Placeholder model helpers: permission checks,
    text rendering, and the _get_attached_* introspection methods."""
    def get_mock_user(self, superuser):
        # Minimal user stand-in exposing only what the permission
        # checks read: is_superuser and has_perm (always False).
        return AttributeObject(
            is_superuser=superuser,
            has_perm=lambda string: False,
        )
    def get_mock_request(self, superuser=True):
        # Request stand-in carrying the mock user.
        return AttributeObject(
            superuser=superuser,
            user=self.get_mock_user(superuser)
        )
    def test_check_placeholder_permissions_ok_for_superuser(self):
        """Superusers always have change permission on a placeholder."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        user = self.get_mock_user(True)
        result = ph.has_change_permission(user)
        self.assertTrue(result)
    def test_check_placeholder_permissions_nok_for_user(self):
        """A non-superuser without explicit perms is denied."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        user = self.get_mock_user(False)
        result = ph.has_change_permission(user)
        self.assertFalse(result)
    def test_check_unicode_rendering(self):
        """The text representation of a placeholder is its slot name."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        result = force_text(ph)
        self.assertEqual(result, u'test')
    def test_excercise_get_attached_model(self):
        ph = Placeholder.objects.create(slot='test', default_width=300)
        result = ph._get_attached_model()
        self.assertEqual(result, None) # Simple PH - no model
    def test_excercise_get_attached_field_name(self):
        ph = Placeholder.objects.create(slot='test', default_width=300)
        result = ph._get_attached_field_name()
        self.assertEqual(result, None) # Simple PH - no field name
    def test_excercise_get_attached_models_notplugins(self):
        """A placeholder owned by a model reports that model, with or
        without plugins in it."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ph = ex.placeholder
        result = list(ph._get_attached_models())
        self.assertEqual(result, [Example1]) # Simple PH - Example1 model
        add_plugin(ph, TextPlugin, 'en', body='en body')
        result = list(ph._get_attached_models())
        self.assertEqual(result, [Example1]) # Simple PH still one Example1 model
    def test_excercise_get_attached_fields_notplugins(self):
        """The attached-field list names the model's PlaceholderField."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four',
        )
        ex.save()
        ph = ex.placeholder
        result = [f.name for f in list(ph._get_attached_fields())]
        self.assertEqual(result, ['placeholder']) # Simple PH - placeholder field name
        add_plugin(ph, TextPlugin, 'en', body='en body')
        result = [f.name for f in list(ph._get_attached_fields())]
        self.assertEqual(result, ['placeholder']) # Simple PH - still one placeholder field name
class PlaceholderConfTests(TestCase):
    """Tests for CMS_PLACEHOLDER_CONF-driven plugin restrictions."""
    def test_get_all_plugins_single_page(self):
        """The 'template slot' conf key restricts the allowed plugins:
        only LinkPlugin survives for col_two.html's col_left slot."""
        page = create_page('page', 'col_two.html', 'en')
        placeholder = page.placeholders.get(slot='col_left')
        conf = {
            'col_two': {
                'plugins': ['TextPlugin', 'LinkPlugin'],
            },
            'col_two.html col_left': {
                'plugins': ['LinkPlugin'],
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            plugins = plugin_pool.get_all_plugins(placeholder, page)
            self.assertEqual(len(plugins), 1, plugins)
            self.assertEqual(plugins[0], LinkPlugin)
    def test_get_all_plugins_inherit(self):
        """A page using template inheritance resolves the conf via the
        parent's concrete template (col_two.html)."""
        parent = create_page('parent', 'col_two.html', 'en')
        page = create_page('page', constants.TEMPLATE_INHERITANCE_MAGIC, 'en', parent=parent)
        placeholder = page.placeholders.get(slot='col_left')
        conf = {
            'col_two': {
                'plugins': ['TextPlugin', 'LinkPlugin'],
            },
            'col_two.html col_left': {
                'plugins': ['LinkPlugin'],
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            plugins = plugin_pool.get_all_plugins(placeholder, page)
            self.assertEqual(len(plugins), 1, plugins)
            self.assertEqual(plugins[0], LinkPlugin)
class PlaceholderI18NTest(CMSTestCase):
    """Admin language-tab rendering for placeholder-holding models."""
    def _get_url(self, app, model, pk):
        # Django 1.8 admin change URLs lack the trailing /change/ segment.
        if DJANGO_1_8:
            return '/de/admin/%s/%s/%d/' % (app, model, pk)
        else:
            return '/de/admin/%s/%s/%d/change/' % (app, model, pk)
    def _testuser(self):
        # Create and return an active staff superuser named "test"
        # (password "test") for logging into the admin.
        User = get_user_model()
        u = User(is_staff=True, is_active=True, is_superuser=True)
        setattr(u, u.USERNAME_FIELD, "test")
        u.set_password("test")
        u.save()
        return u
    def test_hvad_tabs(self):
        """A translated (hvad) model's change form carries the hidden
        language-button input for the current language."""
        ex = MultilingualExample1.objects.language('en').create(char_1='one', char_2='two')
        self._testuser()
        self.client.login(username='test', password='test')
        response = self.client.get(self._get_url('placeholderapp', 'multilingualexample1', ex.pk))
        self.assertContains(response, '<input type="hidden" class="language_button selected" name="de" />')
    def test_no_tabs(self):
        """A non-translated model's change form has no language buttons."""
        ex = Example1.objects.create(
            char_1='one',
            char_2='two',
            char_3='one',
            char_4='two',
        )
        self._testuser()
        self.client.login(username='test', password='test')
        response = self.client.get(self._get_url('placeholderapp', 'example1', ex.pk))
        self.assertNotContains(response, '<input type="hidden" class="language_button selected" name="de" />')
    def test_placeholder_tabs(self):
        """Having multiple placeholders alone does not add language
        tab buttons to the change form."""
        ex = TwoPlaceholderExample.objects.create(
            char_1='one',
            char_2='two',
            char_3='one',
            char_4='two',
        )
        self._testuser()
        self.client.login(username='test', password='test')
        response = self.client.get(self._get_url('placeholderapp', 'twoplaceholderexample', ex.pk))
        self.assertNotContains(response,
                               """<input type="button" onclick="trigger_lang_button(this,'./?language=en');" class="language_button selected" id="debutton" name="en" value="English">""")
| 43.555871 | 186 | 0.631112 |
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.template import TemplateSyntaxError, Template
from django.template.loader import get_template
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.encoding import force_text
from django.utils.numberformat import format
from djangocms_link.cms_plugins import LinkPlugin
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from sekizai.context import SekizaiContext
from cms import constants
from cms.api import add_plugin, create_page, create_title
from cms.exceptions import DuplicatePlaceholderWarning
from cms.models.fields import PlaceholderField
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_pool import plugin_pool
from cms.tests.test_toolbar import ToolbarTestBase
from cms.test_utils.fixtures.fakemlng import FakemlngFixtures
from cms.test_utils.project.fakemlng.models import Translations
from cms.test_utils.project.placeholderapp.models import (
DynamicPlaceholderSlotExample,
Example1,
MultilingualExample1,
TwoPlaceholderExample,
)
from cms.test_utils.project.sampleapp.models import Category
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.mock import AttributeObject
from cms.toolbar.toolbar import CMSToolbar
from cms.toolbar.utils import get_toolbar_from_request
from cms.utils.compat import DJANGO_1_8
from cms.utils.compat.tests import UnittestCompatMixin
from cms.utils.conf import get_cms_setting
from cms.utils.placeholder import (PlaceholderNoAction, MLNGPlaceholderActions,
get_placeholder_conf, get_placeholders, _get_nodelist,
_scan_placeholders)
from cms.utils.plugins import assign_plugins
from cms.utils.urlutils import admin_reverse
def _render_placeholder(placeholder, context, **kwargs):
    """Render *placeholder* through the request toolbar's content renderer.

    Stores the renderer in the context under 'cms_content_renderer' (as the
    CMS template tags expect) and returns the rendered markup.
    """
    renderer = get_toolbar_from_request(context['request']).content_renderer
    context['cms_content_renderer'] = renderer
    return renderer.render_placeholder(placeholder, context, **kwargs)
class PlaceholderTestCase(CMSTestCase, UnittestCompatMixin):
    def setUp(self):
        # Run every test as a logged-in superuser; the login context
        # manager is entered here and exited in tearDown().
        u = self._create_user("test", True, True)
        self._login_context = self.login_user_context(u)
        self._login_context.__enter__()
    def tearDown(self):
        # Close the login context opened in setUp().
        self._login_context.__exit__(None, None, None)
    def test_placeholder_scanning_extend(self):
        """Scanning follows {% extends %}; child placeholders override."""
        placeholders = get_placeholders('placeholder_tests/test_one.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'three']))
    def test_placeholder_scanning_sekizai_extend(self):
        """Same as the extend test for a template using sekizai tags."""
        placeholders = get_placeholders('placeholder_tests/test_one_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'three']))
    def test_placeholder_scanning_include(self):
        """Scanning follows {% include %} into the included template."""
        placeholders = get_placeholders('placeholder_tests/test_two.html')
        self.assertEqual(sorted(placeholders), sorted([u'child', u'three']))
    def test_placeholder_scanning_double_extend(self):
        """Scanning follows a two-level {% extends %} chain."""
        placeholders = get_placeholders('placeholder_tests/test_three.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'new_three']))
    def test_placeholder_scanning_sekizai_double_extend(self):
        """Two-level extends chain with sekizai tags."""
        placeholders = get_placeholders('placeholder_tests/test_three_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'new_three']))
    def test_placeholder_scanning_complex(self):
        """Combined extends + include scenario."""
        placeholders = get_placeholders('placeholder_tests/test_four.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'child', u'four']))
    def test_placeholder_scanning_super(self):
        """Blocks using {{ block.super }} contribute both placeholders."""
        placeholders = get_placeholders('placeholder_tests/test_five.html')
        self.assertEqual(sorted(placeholders), sorted([u'one', u'extra_one', u'two', u'three']))
    def test_placeholder_scanning_nested(self):
        """Placeholders inside nested blocks are found."""
        placeholders = get_placeholders('placeholder_tests/test_six.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'new_two', u'new_three']))
    def test_placeholder_scanning_duplicate(self):
        """A duplicated placeholder warns and is reported only once."""
        placeholders = self.assertWarns(DuplicatePlaceholderWarning,
            'Duplicate {% placeholder "one" %} in template placeholder_tests/test_seven.html.',
            get_placeholders, 'placeholder_tests/test_seven.html')
        self.assertEqual(sorted(placeholders), sorted([u'one']))
    def test_placeholder_scanning_extend_outside_block(self):
        """Placeholders outside any {% block %} in the base are found."""
        placeholders = get_placeholders('placeholder_tests/outside.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))
    def test_placeholder_scanning_sekizai_extend_outside_block(self):
        """Outside-block placeholders with sekizai tags."""
        placeholders = get_placeholders('placeholder_tests/outside_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))
    def test_placeholder_scanning_extend_outside_block_nested(self):
        """Outside-block placeholders across a nested extends chain."""
        placeholders = get_placeholders('placeholder_tests/outside_nested.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))
    def test_placeholder_scanning_sekizai_extend_outside_block_nested(self):
        """Nested outside-block scenario with sekizai tags."""
        placeholders = get_placeholders('placeholder_tests/outside_nested_sekizai.html')
        self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))
    def test_placeholder_scanning_var(self):
        """{% include %} with a variable name is skipped without error;
        a literal include name is still followed."""
        t = Template('{%load cms_tags %}{% include name %}{% placeholder "a_placeholder" %}')
        phs = _scan_placeholders(t.nodelist)
        self.assertListEqual(sorted(phs), sorted([u'a_placeholder']))
        t = Template('{% include "placeholder_tests/outside_nested_sekizai.html" %}')
        phs = _scan_placeholders(t.nodelist)
        self.assertListEqual(sorted(phs), sorted([u'two', u'new_one', u'base_outside']))
    def test_fieldsets_requests(self):
        """The admin add views for placeholder-holding models render OK."""
        response = self.client.get(admin_reverse('placeholderapp_example1_add'))
        self.assertEqual(response.status_code, 200)
        response = self.client.get(admin_reverse('placeholderapp_twoplaceholderexample_add'))
        self.assertEqual(response.status_code, 200)
    def test_page_only_plugins(self):
        """Page-only plugins are not offered on a non-page placeholder."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        response = self.client.get(admin_reverse('placeholderapp_example1_change', args=(ex.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'InheritPagePlaceholderPlugin')
    def test_inter_placeholder_plugin_move(self):
        """Moving a plugin from one placeholder of a model to another
        updates both placeholders' plugin sets and the ordering."""
        ex = TwoPlaceholderExample(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ph1 = ex.placeholder_1
        ph2 = ex.placeholder_2
        # Three plugins per placeholder.
        ph1_pl1 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin1').cmsplugin_ptr
        ph1_pl2 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin2').cmsplugin_ptr
        ph1_pl3 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin3').cmsplugin_ptr
        ph2_pl1 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin1').cmsplugin_ptr
        ph2_pl2 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin2').cmsplugin_ptr
        ph2_pl3 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin3').cmsplugin_ptr
        # Move ph1_pl2 into placeholder 2 with an explicit new order.
        data = {
            'placeholder_id': str(ph2.pk),
            'plugin_id': str(ph1_pl2.pk),
            'plugin_order[]': [str(p.pk) for p in [ph2_pl3, ph2_pl1, ph2_pl2, ph1_pl2]]
        }
        endpoint = self.get_move_plugin_uri(ph1_pl2, container=TwoPlaceholderExample)
        response = self.client.post(endpoint, data)
        self.assertEqual(response.status_code, 200)
        # Placeholder 1 lost the plugin; placeholder 2 has the new order.
        self.assertEqual([ph1_pl1, ph1_pl3], list(ph1.cmsplugin_set.order_by('position')))
        self.assertEqual([ph2_pl3, ph2_pl1, ph2_pl2, ph1_pl2, ], list(ph2.cmsplugin_set.order_by('position')))
    def test_placeholder_render_ghost_plugin(self):
        """A bare CMSPlugin row with no concrete plugin instance attached
        (a "ghost" plugin) is skipped when the placeholder is rendered."""
        page_en = create_page('page_en', 'col_two.html', 'en')
        placeholder_en = page_en.placeholders.get(slot='col_left')
        # Plugin record created directly, without a LinkPlugin instance.
        CMSPlugin.objects.create(
            language='en',
            plugin_type='LinkPlugin',
            position=1,
            placeholder=placeholder_en,
            parent=None,
        )
        # A real plugin alongside it.
        add_plugin(
            placeholder_en,
            "LinkPlugin",
            "en",
            name='name',
            url='http://example.com/',
        )
        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        # Only the real plugin's output appears.
        content_en = _render_placeholder(placeholder_en, context_en)
        self.assertEqual(content_en.strip(), '<a href="http://example.com/" >name</a>')
    def test_placeholder_render_ghost_plugin_with_child(self):
        """A ghost plugin's children are skipped along with it."""
        page_en = create_page('page_en', 'col_two.html', 'en')
        placeholder_en = page_en.placeholders.get(slot='col_left')
        # Ghost parent: CMSPlugin row with no plugin instance.
        plugin = CMSPlugin.objects.create(
            language='en',
            plugin_type='LinkPlugin',
            position=1,
            placeholder=placeholder_en,
            parent=None,
        )
        # Child of the ghost plugin; must not be rendered.
        add_plugin(
            placeholder_en,
            "LinkPlugin",
            "en",
            target=plugin,
            name='invalid',
            url='http://example.com/',
        )
        # Independent plugin; must be rendered.
        add_plugin(
            placeholder_en,
            "LinkPlugin",
            "en",
            name='valid',
            url='http://example.com/',
        )
        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        content_en = _render_placeholder(placeholder_en, context_en)
        self.assertEqual(content_en.strip(), '<a href="http://example.com/" >valid</a>')
    @override_settings(CMS_PERMISSION=False)
    def test_nested_plugin_escapejs(self):
        """Posting the plugin change form on a model placeholder returns
        the JS save callback in the response."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ph1 = ex.placeholder
        test_plugin = add_plugin(ph1, u"EmptyPlugin", u"en")
        test_plugin.save()
        endpoint = self.get_change_plugin_uri(test_plugin, container=Example1)
        response = self.client.post(endpoint, {})
        self.assertContains(response, "CMS.API.Helpers.onPluginSave")
    @override_settings(CMS_PERMISSION=False)
    def test_nested_plugin_escapejs_page(self):
        """Same as test_nested_plugin_escapejs for a page placeholder."""
        page = create_page('page', 'col_two.html', 'en')
        ph1 = page.placeholders.get(slot='col_left')
        test_plugin = add_plugin(ph1, u"EmptyPlugin", u"en")
        test_plugin.save()
        endpoint = self.get_change_plugin_uri(test_plugin)
        response = self.client.post(endpoint, {})
        self.assertContains(response, "CMS.API.Helpers.onPluginSave")
    def test_placeholder_scanning_fail(self):
        """A malformed placeholder tag raises TemplateSyntaxError."""
        self.assertRaises(TemplateSyntaxError, get_placeholders, 'placeholder_tests/test_eleven.html')
    def test_placeholder_tag(self):
        """{% render_placeholder %} renders nothing for a missing or empty
        placeholder and the plugin body once one is added."""
        request = self.get_request('/', language=settings.LANGUAGES[0][0])
        template = "{% load cms_tags %}{% render_placeholder placeholder %}"
        # No placeholder in context -> empty output.
        output = self.render_template_obj(template, {}, request)
        self.assertEqual(output, "")
        # Empty placeholder -> still empty.
        placeholder = Placeholder.objects.create(slot="test")
        output = self.render_template_obj(template, {'placeholder': placeholder}, request)
        self.assertEqual(output, "")
        self.assertEqual(placeholder.get_plugins().count(), 0)
        add_plugin(placeholder, "TextPlugin", settings.LANGUAGES[0][0], body="test")
        self.assertEqual(placeholder.get_plugins().count(), 1)
        # Reload to drop the cached (empty) plugin list.
        placeholder = self.reload(placeholder)
        output = self.render_template_obj(template, {'placeholder': placeholder}, request)
        self.assertEqual(output, "test")
    def test_placeholder_tag_language(self):
        """The language argument selects which translation is rendered."""
        template = "{% load cms_tags %}{% render_placeholder placeholder language language %}"
        placeholder = Placeholder.objects.create(slot="test")
        add_plugin(placeholder, "TextPlugin", 'en', body="English")
        add_plugin(placeholder, "TextPlugin", 'de', body="Deutsch")
        request = self.get_request('/')
        output = self.render_template_obj(template, {'placeholder': placeholder, 'language': 'en'}, request)
        self.assertEqual(output.strip(), "English")
        # Drop the per-instance plugin cache before switching language.
        del placeholder._plugins_cache
        output = self.render_template_obj(template, {'placeholder': placeholder, 'language': 'de'}, request)
        self.assertEqual(output.strip(), "Deutsch")
    def test_get_placeholder_conf(self):
        """get_placeholder_conf resolves settings in priority order:
        'template slot' key, plain slot key, 'inherit' chains, and
        finally the catch-all None entry."""
        TEST_CONF = {
            'main': {
                'name': 'main content',
                'plugins': ['TextPlugin', 'LinkPlugin'],
                'default_plugins': [
                    {
                        'plugin_type': 'TextPlugin',
                        'values': {
                            'body': '<p>Some default text</p>'
                        },
                    },
                ],
            },
            'layout/home.html main': {
                'name': u'main content with FilerImagePlugin and limit',
                'plugins': ['TextPlugin', 'FilerImagePlugin', 'LinkPlugin'],
                'inherit': 'main',
                'limits': {'global': 1},
            },
            'layout/other.html main': {
                'name': u'main content with FilerImagePlugin and no limit',
                'inherit': 'layout/home.html main',
                'limits': {},
                'excluded_plugins': ['LinkPlugin']
            },
            None: {
                'name': u'All',
                'plugins': ['FilerImagePlugin', 'LinkPlugin'],
                'limits': {},
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=TEST_CONF):
            # Plain slot key.
            returned = get_placeholder_conf('plugins', 'main')
            self.assertEqual(returned, TEST_CONF['main']['plugins'])
            # Exact 'template slot' key wins over the slot key.
            returned = get_placeholder_conf('plugins', 'main', 'layout/home.html')
            self.assertEqual(returned, TEST_CONF['layout/home.html main']['plugins'])
            # 'inherit' pulls the value from the referenced entry.
            returned = get_placeholder_conf('plugins', 'main', 'layout/other.html')
            self.assertEqual(returned, TEST_CONF['layout/home.html main']['plugins'])
            # Keys defined locally are not overridden by inheritance.
            returned = get_placeholder_conf('excluded_plugins', 'main', 'layout/other.html')
            self.assertEqual(returned, TEST_CONF['layout/other.html main']['excluded_plugins'])
            # Inheritance follows the chain down to the 'main' entry.
            returned = get_placeholder_conf('default_plugins', 'main', 'layout/other.html')
            self.assertEqual(returned, TEST_CONF['main']['default_plugins'])
            # Unknown slot falls back to the None (catch-all) entry.
            returned = get_placeholder_conf('plugins', 'something')
            self.assertEqual(returned, TEST_CONF[None]['plugins'])
    def test_placeholder_context_leaking(self):
        """extra_context from CMS_PLACEHOLDER_CONF must not leak into the
        surrounding template context after rendering."""
        TEST_CONF = {'test': {'extra_context': {'extra_width': 10}}}
        ph = Placeholder.objects.create(slot='test')
        content_renderer = self.get_content_renderer()
        context = SekizaiContext()
        context['request'] = content_renderer.request
        with self.settings(CMS_PLACEHOLDER_CONF=TEST_CONF):
            _render_placeholder(ph, context)
            self.assertFalse('extra_width' in context)
            ph.render(context, None)
            self.assertFalse('extra_width' in context)
    def test_placeholder_scanning_nested_super(self):
        """Placeholders across several {{ block.super }} levels are found."""
        placeholders = get_placeholders('placeholder_tests/nested_super_level1.html')
        self.assertEqual(sorted(placeholders), sorted([u'level1', u'level2', u'level3', u'level4']))
    def test_placeholder_field_no_related_name(self):
        """PlaceholderField rejects related_name='+'."""
        self.assertRaises(ValueError, PlaceholderField, 'placeholder', related_name='+')
    def test_placeholder_field_db_table(self):
        """The attached field of a model placeholder points to the model."""
        example = Category.objects.create(
            name='category',
            parent=None, depth=1,
        )
        self.assertEqual(example.description._get_attached_fields()[0].model, Category)
        self.assertEqual(len(example.description._get_attached_fields()), 1)
    def test_placeholder_field_valid_slotname(self):
        """PlaceholderField rejects a non-string slot name."""
        self.assertRaises(ImproperlyConfigured, PlaceholderField, 10)
    def test_placeholder_field_dynamic_slot_generation(self):
        """Placeholder slots are derived from the instance's char fields."""
        instance = DynamicPlaceholderSlotExample.objects.create(char_1='slot1', char_2='slot2')
        self.assertEqual(instance.char_1, instance.placeholder_1.slot)
        self.assertEqual(instance.char_2, instance.placeholder_2.slot)
    def test_placeholder_field_dynamic_slot_update(self):
        """Changing the slot-source fields keeps the same placeholder rows
        (pk and plugin counts unchanged) while the slot names swap."""
        instance = DynamicPlaceholderSlotExample.objects.create(char_1='slot1', char_2='slot2')
        old_placeholder_1_plugin_count = len(instance.placeholder_1.get_plugins())
        old_placeholder_2_plugin_count = len(instance.placeholder_2.get_plugins())
        # Swap the field values that drive the slot names.
        instance.char_1, instance.char_2 = instance.char_2, instance.char_1
        placeholder_1_id = instance.placeholder_1.pk
        placeholder_2_id = instance.placeholder_2.pk
        instance.save()
        current_placeholder_1_plugin_count = len(instance.placeholder_1.get_plugins())
        current_placeholder_2_plugin_count = len(instance.placeholder_2.get_plugins())
        self.assertEqual(instance.char_2, 'slot1')
        self.assertEqual(instance.char_1, 'slot2')
        self.assertEqual(instance.placeholder_1.pk, placeholder_1_id)
        self.assertEqual(instance.placeholder_2.pk, placeholder_2_id)
        self.assertEqual(old_placeholder_1_plugin_count, current_placeholder_1_plugin_count)
        self.assertEqual(old_placeholder_2_plugin_count, current_placeholder_2_plugin_count)
def test_plugins_language_fallback(self):
page_en = create_page('page_en', 'col_two.html', 'en')
title_de = create_title("de", "page_de", page_en)
placeholder_en = page_en.placeholders.get(slot='col_left')
placeholder_de = title_de.page.placeholders.get(slot='col_left')
add_plugin(placeholder_en, TextPlugin, 'en', body='en body')
context_en = SekizaiContext()
context_en['request'] = self.get_request(language="en", page=page_en)
context_de = SekizaiContext()
context_de['request'] = self.get_request(language="de", page=page_en)
placeholder_en, context_en)
self.assertRegexpMatches(content_en, "^en body$")
r(placeholder_de, context_de)
self.assertRegexpMatches(content_de, "^en body$")
self.assertEqual(len(content_de), 7)
conf = {
'col_left': {
'language_fallback': False,
},
}
with self.settings(CMS_PLACEHOLDER_CONF=conf):
lugins_cache)
cache.clear()
content_de = _render_placeholder(placeholder_de, context_de)
"^en body$")
context_de2 = SekizaiContext()
request = self.get_request(language="de", page=page_en)
request.user = self.get_superuser()
request.toolbar = CMSToolbar(request)
request.toolbar.edit_mode = True
context_de2['request'] = request
del(placeholder_de._plugins_cache)
cache.clear()
content_de2 = _render_placeholder(placeholder_de, context_de2)
self.assertFalse("en body" in content_de2)
del(placeholder_de._plugins_cache)
cache.clear()
add_plugin(placeholder_de, TextPlugin, 'de', body='de body')
content_de = _render_placeholder(placeholder_de, context_de)
self.assertRegexpMatches(content_de, "^de body$")
    def test_nested_plugins_language_fallback(self):
        """Language fallback also applies to nested plugins: the whole
        English plugin tree (link + nested text) renders on the German
        page until German plugins exist."""
        page_en = create_page('page_en', 'col_two.html', 'en')
        title_de = create_title("de", "page_de", page_en)
        placeholder_en = page_en.placeholders.get(slot='col_left')
        placeholder_de = title_de.page.placeholders.get(slot='col_left')
        # English link plugin with a nested text child.
        link_en = add_plugin(placeholder_en, LinkPlugin, 'en', name='en name', url='http://example.com/en')
        add_plugin(placeholder_en, TextPlugin, 'en', target=link_en, body='en body')
        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        context_de = SekizaiContext()
        context_de['request'] = self.get_request(language="de", page=page_en)
        conf = {
            'col_left': {
                'language_fallback': True,
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            # German page renders the English tree via fallback.
            content_de = _render_placeholder(placeholder_de, context_de)
            self.assertRegexpMatches(content_de, "<a href=\"http://example.com/en\"")
            self.assertRegexpMatches(content_de, "en body")
            # In edit mode no fallback content is shown.
            context_de2 = SekizaiContext()
            request = self.get_request(language="de", page=page_en)
            request.user = self.get_superuser()
            request.toolbar = CMSToolbar(request)
            request.toolbar.edit_mode = True
            context_de2['request'] = request
            del(placeholder_de._plugins_cache)
            cache.clear()
            content_de2 = _render_placeholder(placeholder_de, context_de2)
            self.assertFalse("en body" in content_de2)
            del(placeholder_de._plugins_cache)
            cache.clear()
            # With German plugins present, they are rendered instead.
            link_de = add_plugin(placeholder_en, LinkPlugin, 'de', name='de name', url='http://example.com/de')
            add_plugin(placeholder_en, TextPlugin, 'de', target=link_de, body='de body')
            content_de = _render_placeholder(placeholder_de, context_de)
            self.assertRegexpMatches(content_de, "<a href=\"http://example.com/de\"")
            self.assertRegexpMatches(content_de, "de body")
def test_plugins_non_default_language_fallback(self):
page_en = create_page('page_en', 'col_two.html', 'en')
create_title("de", "page_de", page_en)
placeholder_en = page_en.placeholders.get(slot='col_left')
placeholder_de = page_en.placeholders.get(slot='col_left')
add_plugin(placeholder_de, TextPlugin, 'de', body='de body')
context_en = SekizaiContext()
context_en['request'] = self.get_request(language="en", page=page_en)
context_de = SekizaiContext()
context_de['request'] = self.get_request(language="de", page=page_en)
placeholder_en, context_de)
self.assertRegexpMatches(content_de, "^de body$")
del(placeholder_en._plugins_cache)
cache.clear()
eholder(placeholder_en, context_en)
self.assertRegexpMatches(content_en, "^de body$")
self.assertEqual(len(content_en), 7)
del(placeholder_en._plugins_cache)
cache.clear()
conf = {
'col_left': {
'language_fallback': False,
},
}
with self.settings(CMS_PLACEHOLDER_CONF=conf):
holder(placeholder_en, context_en)
self.assertNotRegex(content_en, "^de body$")
del(placeholder_en._plugins_cache)
cache.clear()
add_plugin(placeholder_en, TextPlugin, 'en', body='en body')
content_en = _render_placeholder(placeholder_en, context_en)
self.assertRegexpMatches(content_en, "^en body$")
    def test_plugins_discarded_with_language_fallback(self):
        """With fallbacks enabled, assign_plugins across several
        placeholders must not mix up content: the sidebar still renders
        only its own plugin."""
        page_en = create_page('page_en', 'col_two.html', 'en')
        create_title("de", "page_de", page_en)
        placeholder_sidebar_en = page_en.placeholders.get(slot='col_sidebar')
        placeholder_en = page_en.placeholders.get(slot='col_left')
        add_plugin(placeholder_sidebar_en, TextPlugin, 'en', body='en body')
        context_en = SekizaiContext()
        context_en['request'] = self.get_request(language="en", page=page_en)
        conf = {
            'col_left': {
                'language_fallback': True,
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            # Bulk-assign plugins to both placeholders, then render.
            assign_plugins(context_en['request'],
                           [placeholder_sidebar_en, placeholder_en], 'col_two.html')
            content_en = _render_placeholder(placeholder_sidebar_en, context_en)
            self.assertRegexpMatches(content_en, "^en body$")
        del(placeholder_sidebar_en._plugins_cache)
        cache.clear()
    def test_plugins_prepopulate(self):
        """default_plugins from CMS_PLACEHOLDER_CONF are created and
        rendered on first render of an empty placeholder."""
        conf = {
            'col_left': {
                'default_plugins' : [
                    {
                        'plugin_type':'TextPlugin',
                        'values':{'body':'<p>en default body 1</p>'},
                    },
                    {
                        'plugin_type':'TextPlugin',
                        'values':{'body':'<p>en default body 2</p>'},
                    },
                ]
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            page = create_page('page_en', 'col_two.html', 'en')
            placeholder = page.placeholders.get(slot='col_left')
            context = SekizaiContext()
            context['request'] = self.get_request(language="en", page=page)
            # Both configured default plugins appear, in order.
            content = _render_placeholder(placeholder, context)
            self.assertRegexpMatches(content, "^<p>en default body 1</p>\s*<p>en default body 2</p>$")
    def test_plugins_children_prepopulate(self):
        """default_plugins may declare nested children; rendering creates
        the parent text plugin plus its two link children."""
        conf = {
            'col_left': {
                'default_plugins': [
                    {
                        'plugin_type': 'TextPlugin',
                        'values': {
                            'body': '<p>body %(_tag_child_1)s and %(_tag_child_2)s</p>'
                        },
                        'children': [
                            {
                                'plugin_type': 'LinkPlugin',
                                'values': {
                                    'name': 'django',
                                    'url': 'https://www.djangoproject.com/'
                                },
                            },
                            {
                                'plugin_type': 'LinkPlugin',
                                'values': {
                                    'name': 'django-cms',
                                    'url': 'https://www.django-cms.org'
                                },
                            },
                        ]
                    },
                ]
            },
        }
        with self.settings(CMS_PLACEHOLDER_CONF=conf):
            page = create_page('page_en', 'col_two.html', 'en')
            placeholder = page.placeholders.get(slot='col_left')
            context = SekizaiContext()
            context['request'] = self.get_request(language="en", page=page)
            _render_placeholder(placeholder, context)
            plugins = placeholder.get_plugins_list()
            # One text parent, two link children, both parented to it.
            self.assertEqual(len(plugins), 3)
            self.assertEqual(plugins[0].plugin_type, 'TextPlugin')
            self.assertEqual(plugins[1].plugin_type, 'LinkPlugin')
            self.assertEqual(plugins[2].plugin_type, 'LinkPlugin')
            self.assertTrue(plugins[1].parent == plugins[2].parent and plugins[1].parent == plugins[0])
    def test_placeholder_pk_thousands_format(self):
        """Toolbar JSON must carry raw (unlocalized) pks even when
        USE_THOUSAND_SEPARATOR would render ids like '1,000'."""
        page = create_page("page", "nav_playground.html", "en", published=True)
        for placeholder in page.placeholders.all():
            page.placeholders.remove(placeholder)
            # Push pks past 1000 so localized formatting would differ.
            placeholder.pk += 1000
            placeholder.save()
            page.placeholders.add(placeholder)
        page.reload()
        for placeholder in page.placeholders.all():
            add_plugin(placeholder, "TextPlugin", "en", body="body")
        with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True):
            user = self.get_superuser()
            # NOTE(review): password is the username-field value -- assumes
            # get_superuser() sets them equal; confirm against the fixture.
            self.client.login(username=getattr(user, get_user_model().USERNAME_FIELD),
                              password=getattr(user, get_user_model().USERNAME_FIELD))
            response = self.client.get("/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
            for placeholder in page.placeholders.all():
                # The raw pk must appear; the thousand-separated form must not.
                self.assertContains(
                    response, '"placeholder_id": "%s"' % placeholder.pk)
                self.assertNotContains(
                    response, '"placeholder_id": "%s"' % format(
                        placeholder.pk, ".", grouping=3, thousand_sep=","))
                self.assertNotContains(
                    response, '"plugin_id": "%s"' % format(
                        placeholder.pk, ".", grouping=3, thousand_sep=","))
                self.assertNotContains(
                    response, '"clipboard": "%s"' % format(
                        response.context['request'].toolbar.clipboard.pk, ".",
                        grouping=3, thousand_sep=","))
def test_placeholder_languages_model(self):
avail_langs = set([u'en', u'de', u'fr'])
ex = Example1(
char_1='one',
char_2='two',
char_3='tree',
char_4='four'
)
ex.save()
for lang in avail_langs:
add_plugin(ex.placeholder, u"EmptyPlugin", lang)
ex = Example1.objects.get(pk=ex.pk)
langs = [lang['code'] for lang in ex.placeholder.get_filled_languages()]
self.assertEqual(avail_langs, set(langs))
def test_placeholder_languages_page(self):
avail_langs = set([u'en', u'de', u'fr'])
page = create_page('test page', 'col_two.html', u'en')
for lang in avail_langs:
if lang != u'en':
create_title(lang, 'test page %s' % lang, page)
placeholder = page.placeholders.get(slot='col_sidebar')
for lang in avail_langs:
add_plugin(placeholder, u"EmptyPlugin", lang)
placeholder = page.placeholders.get(slot='col_sidebar')
langs = [lang['code'] for lang in placeholder.get_filled_languages()]
self.assertEqual(avail_langs, set(langs))
    @override_settings(TEMPLATE_LOADERS=(
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),))
    def test_cached_template_not_corrupted_by_placeholder_scan(self):
        """Scanning placeholders must not mutate templates held by Django's
        cached template loader (regression guard)."""
        # The cached loader shares node trees between requests, so any
        # in-place modification done during scanning would leak into later
        # renders.
        nodelist = _get_nodelist(get_template("placeholder_tests/test_super_extends_2.html"))
        self.assertNotIn('one',
                         nodelist[0].blocks.keys(),
                         "test_super_extends_1.html contains a block called 'one', "
                         "but _2.html does not.")
        get_placeholders("placeholder_tests/test_super_extends_2.html")
        nodelist = _get_nodelist(get_template("placeholder_tests/test_super_extends_2.html"))
        self.assertNotIn('one',
                         nodelist[0].blocks.keys(),
                         "test_super_extends_1.html still should not contain a block "
                         "called 'one' after rescanning placeholders.")
    @override_settings(TEMPLATE_LOADERS=(
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),))
    def test_super_extends_not_corrupted_by_placeholder_scan(self):
        """Template inheritance output stays intact after a placeholder scan
        of the same (cached) template."""
        template = get_template("placeholder_tests/test_super_extends_2.html")
        output = template.render({})
        # 'Whee' is expected to appear on exactly one line -- presumably it
        # comes from the parent template; confirm against the fixtures.
        self.assertEqual(['Whee'], [o for o in output.split('\n')
                                    if 'Whee' in o])
        get_placeholders("placeholder_tests/test_super_extends_2.html")
        template = get_template("placeholder_tests/test_super_extends_2.html")
        output = template.render({})
        self.assertEqual(['Whee'], [o for o in output.split('\n')
                                    if 'Whee' in o])
class PlaceholderActionTests(FakemlngFixtures, CMSTestCase):
    """Tests for the placeholder copy-action helpers (PlaceholderNoAction and
    MLNGPlaceholderActions) against the fakemlng Translations fixtures."""

    def test_placeholder_no_action(self):
        """The no-op implementation offers no languages and copies nothing."""
        actions = PlaceholderNoAction()
        self.assertEqual(actions.get_copy_languages(), [])
        self.assertFalse(actions.copy())

    def test_mlng_placeholder_actions_get_copy_languages(self):
        """get_copy_languages lists the *other* languages that have content."""
        actions = MLNGPlaceholderActions()
        fr = Translations.objects.get(language_code='fr')
        de = Translations.objects.get(language_code='de')
        en = Translations.objects.get(language_code='en')
        fieldname = 'placeholder'
        fr_copy_languages = actions.get_copy_languages(
            fr.placeholder, Translations, fieldname
        )
        de_copy_languages = actions.get_copy_languages(
            de.placeholder, Translations, fieldname
        )
        en_copy_languages = actions.get_copy_languages(
            en.placeholder, Translations, fieldname
        )
        EN = ('en', 'English')
        FR = ('fr', 'French')
        # A language never offers itself as a copy source; the expectations
        # encode which fixture languages actually hold plugins -- presumably
        # en and fr do, de does not (confirm against FakemlngFixtures).
        self.assertEqual(set(fr_copy_languages), set([EN]))
        self.assertEqual(set(de_copy_languages), set([EN, FR]))
        self.assertEqual(set(en_copy_languages), set([FR]))

    def test_mlng_placeholder_actions_copy(self):
        """copy() clones fr's single plugin into the empty de placeholder."""
        actions = MLNGPlaceholderActions()
        fr = Translations.objects.get(language_code='fr')
        de = Translations.objects.get(language_code='de')
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
        new_plugins = actions.copy(de.placeholder, 'fr', 'placeholder', Translations, 'de')
        self.assertEqual(len(new_plugins), 1)
        de = self.reload(de)
        fr = self.reload(fr)
        # The source keeps its plugin; the target gains a copy.
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 1)

    def test_mlng_placeholder_actions_empty_copy(self):
        """Copying *from* an empty language yields no new plugins."""
        actions = MLNGPlaceholderActions()
        fr = Translations.objects.get(language_code='fr')
        de = Translations.objects.get(language_code='de')
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
        new_plugins = actions.copy(fr.placeholder, 'de', 'placeholder', Translations, 'fr')
        self.assertEqual(len(new_plugins), 0)
        de = self.reload(de)
        fr = self.reload(fr)
        self.assertEqual(fr.placeholder.get_plugins().count(), 1)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)

    def test_mlng_placeholder_actions_no_placeholder(self):
        """copy() returns False when the target row has no placeholder."""
        actions = MLNGPlaceholderActions()
        Translations.objects.filter(language_code='nl').update(placeholder=None)
        de = Translations.objects.get(language_code='de')
        nl = Translations.objects.get(language_code='nl')
        self.assertEqual(nl.placeholder, None)
        self.assertEqual(de.placeholder.get_plugins().count(), 0)
        okay = actions.copy(de.placeholder, 'nl', 'placeholder', Translations, 'de')
        self.assertEqual(okay, False)
        # NOTE(review): the fetches below assert nothing -- the test ends
        # without verifying post-copy state; consider adding assertions or
        # removing the dead queries.
        de = self.reload(de)
        nl = self.reload(nl)
        nl = Translations.objects.get(language_code='nl')
        de = Translations.objects.get(language_code='de')
@override_settings(CMS_PERMISSION=False)
class PlaceholderModelTests(ToolbarTestBase, CMSTestCase):
    """Unit tests for Placeholder permission checks and the private
    _get_attached_* metadata helpers."""

    def get_mock_user(self, superuser):
        # Minimal User stand-in: the code under test only reads
        # is_superuser and calls has_perm (which always denies here).
        return AttributeObject(
            is_superuser=superuser,
            has_perm=lambda string: False,
        )

    def get_mock_request(self, superuser=True):
        # Request stand-in carrying only the attributes read by the code
        # under test.
        return AttributeObject(
            superuser=superuser,
            user=self.get_mock_user(superuser)
        )

    def test_check_placeholder_permissions_ok_for_superuser(self):
        """Superusers may change any placeholder."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        user = self.get_mock_user(True)
        result = ph.has_change_permission(user)
        self.assertTrue(result)

    def test_check_placeholder_permissions_nok_for_user(self):
        """Regular users without explicit perms may not change placeholders."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        user = self.get_mock_user(False)
        result = ph.has_change_permission(user)
        self.assertFalse(result)

    def test_check_unicode_rendering(self):
        """A placeholder's text representation is its slot name."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        result = force_text(ph)
        self.assertEqual(result, u'test')

    def test_excercise_get_attached_model(self):
        """An orphan placeholder has no attached model."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        result = ph._get_attached_model()
        self.assertEqual(result, None)

    def test_excercise_get_attached_field_name(self):
        """An orphan placeholder has no attached field name."""
        ph = Placeholder.objects.create(slot='test', default_width=300)
        result = ph._get_attached_field_name()
        self.assertEqual(result, None)

    def test_excercise_get_attached_models_notplugins(self):
        """_get_attached_models reports the owning model, before and after
        plugins are added."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four'
        )
        ex.save()
        ph = ex.placeholder
        result = list(ph._get_attached_models())
        self.assertEqual(result, [Example1])
        add_plugin(ph, TextPlugin, 'en', body='en body')
        result = list(ph._get_attached_models())
        self.assertEqual(result, [Example1])

    def test_excercise_get_attached_fields_notplugins(self):
        """_get_attached_fields reports the owning field, before and after
        plugins are added."""
        ex = Example1(
            char_1='one',
            char_2='two',
            char_3='tree',
            char_4='four',
        )
        ex.save()
        ph = ex.placeholder
        result = [f.name for f in list(ph._get_attached_fields())]
        self.assertEqual(result, ['placeholder'])
        add_plugin(ph, TextPlugin, 'en', body='en body')
        result = [f.name for f in list(ph._get_attached_fields())]
        self.assertEqual(result, ['placeholder'])
class PlaceholderConfTests(TestCase):
    """The most specific CMS_PLACEHOLDER_CONF key ('<template> <slot>') must
    win over the bare slot key when resolving allowed plugins."""

    # Shared conf: the template-specific key restricts col_left to
    # LinkPlugin only, overriding the broader 'col_two' entry.
    conf = {
        'col_two': {
            'plugins': ['TextPlugin', 'LinkPlugin'],
        },
        'col_two.html col_left': {
            'plugins': ['LinkPlugin'],
        },
    }

    def _assert_only_link_plugin(self, placeholder, page):
        # Resolve allowed plugins under the shared conf and check that only
        # LinkPlugin survives the template-specific restriction.
        with self.settings(CMS_PLACEHOLDER_CONF=self.conf):
            plugins = plugin_pool.get_all_plugins(placeholder, page)
            self.assertEqual(len(plugins), 1, plugins)
            self.assertEqual(plugins[0], LinkPlugin)

    def test_get_all_plugins_single_page(self):
        """Direct template assignment resolves the specific conf key."""
        page = create_page('page', 'col_two.html', 'en')
        placeholder = page.placeholders.get(slot='col_left')
        self._assert_only_link_plugin(placeholder, page)

    def test_get_all_plugins_inherit(self):
        """A template inherited from the parent page resolves identically."""
        parent = create_page('parent', 'col_two.html', 'en')
        page = create_page('page', constants.TEMPLATE_INHERITANCE_MAGIC, 'en', parent=parent)
        placeholder = page.placeholders.get(slot='col_left')
        self._assert_only_link_plugin(placeholder, page)
class PlaceholderI18NTest(CMSTestCase):
    """Admin change views render the language tab bar only for translated
    (hvad) models, never for plain placeholder models."""

    def _get_url(self, app, model, pk):
        # Django >= 1.9 admin object URLs end in /change/.
        if DJANGO_1_8:
            return '/de/admin/%s/%s/%d/' % (app, model, pk)
        else:
            return '/de/admin/%s/%s/%d/change/' % (app, model, pk)

    def _testuser(self):
        """Create, save and return an active superuser 'test'/'test'."""
        User = get_user_model()
        u = User(is_staff=True, is_active=True, is_superuser=True)
        setattr(u, u.USERNAME_FIELD, "test")
        u.set_password("test")
        u.save()
        return u

    def test_hvad_tabs(self):
        """A hvad-translated model's change view shows the language tabs."""
        ex = MultilingualExample1.objects.language('en').create(char_1='one', char_2='two')
        self._testuser()
        self.client.login(username='test', password='test')
        response = self.client.get(self._get_url('placeholderapp', 'multilingualexample1', ex.pk))
        self.assertContains(response, '<input type="hidden" class="language_button selected" name="de" />')

    def test_no_tabs(self):
        """A plain (untranslated) model's change view shows no language tabs."""
        ex = Example1.objects.create(
            char_1='one',
            char_2='two',
            char_3='one',
            char_4='two',
        )
        self._testuser()
        self.client.login(username='test', password='test')
        response = self.client.get(self._get_url('placeholderapp', 'example1', ex.pk))
        self.assertNotContains(response, '<input type="hidden" class="language_button selected" name="de" />')

    def test_placeholder_tabs(self):
        """A model with two placeholders shows no language switcher button."""
        ex = TwoPlaceholderExample.objects.create(
            char_1='one',
            char_2='two',
            char_3='one',
            char_4='two',
        )
        self._testuser()
        self.client.login(username='test', password='test')
        response = self.client.get(self._get_url('placeholderapp', 'twoplaceholderexample', ex.pk))
        self.assertNotContains(response,
                               """<input type="button" onclick="trigger_lang_button(this,'./?language=en');" class="language_button selected" id="debutton" name="en" value="English">""")
| true | true |
f7fde447fc00d01bd9a584e0c6c4928f1c5b2a6c | 320 | py | Python | pritunl_wireguard_client/utils/__init__.py | SuperBo/pritunl-wireguard-cient | bed4407bf2b7811f7180d72446a2dc26d45db90d | [
"MIT"
] | 1 | 2021-02-16T07:08:46.000Z | 2021-02-16T07:08:46.000Z | pritunl_wireguard_client/utils/__init__.py | SuperBo/pritunl-wireguard-cient | bed4407bf2b7811f7180d72446a2dc26d45db90d | [
"MIT"
] | 1 | 2022-02-08T13:34:18.000Z | 2022-02-08T13:34:18.000Z | pritunl_wireguard_client/utils/__init__.py | SuperBo/pritunl-wireguard-cient | bed4407bf2b7811f7180d72446a2dc26d45db90d | [
"MIT"
] | 1 | 2021-03-18T14:34:41.000Z | 2021-03-18T14:34:41.000Z | from pritunl_wireguard_client.utils.importer import download_profile
from pritunl_wireguard_client.utils.pritunl_auth import \
pritunl_auth, pritunl_sign, verify_signature, ClientBox
from pritunl_wireguard_client.utils.token import Tokens
from pritunl_wireguard_client.utils.random import rand_str, rand_str_complex
| 53.333333 | 76 | 0.88125 | from pritunl_wireguard_client.utils.importer import download_profile
from pritunl_wireguard_client.utils.pritunl_auth import \
pritunl_auth, pritunl_sign, verify_signature, ClientBox
from pritunl_wireguard_client.utils.token import Tokens
from pritunl_wireguard_client.utils.random import rand_str, rand_str_complex
| true | true |
f7fde454834319d4d9c8d2871748e00cbe81e975 | 3,325 | py | Python | nicos_jcns/dls01/setups/dls.py | ebadkamil/nicos | 0355a970d627aae170c93292f08f95759c97f3b5 | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | nicos_jcns/dls01/setups/dls.py | ebadkamil/nicos | 0355a970d627aae170c93292f08f95759c97f3b5 | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2021-08-18T10:55:42.000Z | 2021-08-18T10:55:42.000Z | nicos_jcns/dls01/setups/dls.py | ISISComputingGroup/nicos | 94cb4d172815919481f8c6ee686f21ebb76f2068 | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | # -*- coding: utf-8 -*-
# NICOS instrument setup: dynamic light scattering (DLS) configuration.
description = 'Dynamic light scattering setup'
group = 'basic'
includes = []
sysconfig = dict(
    datasinks = ['dlssink'],
)
# All hardware devices are reached through this Tango database.
tango_base = 'tango://localhost:10000/dls/'
devices = dict(
    card1 = device('nicos_mlz.kws1.devices.dls.DLSCard',
        description = 'DLS correlator card 1',
        tangodevice = tango_base + 'corr1/spectra',
        angles = [75, 0],
        wheels = ['wheel_laser'],
        unit = '',
    ),
    card2 = device('nicos_mlz.kws1.devices.dls.DLSCard',
        description = 'DLS correlator card 2',
        tangodevice = tango_base + 'corr2/spectra',
        angles = [115, 100],
        wheels = ['wheel_laser', 'wheel_det1', 'wheel_det2'],
        unit = '',
    ),
    shutter = device('nicos.devices.tango.NamedDigitalOutput',
        description = 'Laser shutter for DLS',
        tangodevice = tango_base + 'shutter/ctrl',
        mapping = {'open': 1, 'closed': 0},
    ),
    # Filter wheels attenuate the beam before the laser and each detector.
    wheel_laser = device('nicos.devices.tango.DigitalOutput',
        description = 'Filter wheel in front of laser',
        tangodevice = tango_base + 'wheel0/pos',
        fmtstr = '%d',
    ),
    wheel_det1 = device('nicos.devices.tango.DigitalOutput',
        description = 'Filter wheel in front of detector 1',
        tangodevice = tango_base + 'wheel1/pos',
        fmtstr = '%d',
    ),
    wheel_det2 = device('nicos.devices.tango.DigitalOutput',
        description = 'Filter wheel in front of detector 2',
        tangodevice = tango_base + 'wheel2/pos',
        fmtstr = '%d',
    ),
    limiter_laser = device('nicos.devices.tango.AnalogOutput',
        description = 'Helper device to limit photon intensity using filter wheel',
        tangodevice = tango_base + 'wheel0/limiter',
    ),
    limiter_det1 = device('nicos.devices.tango.AnalogOutput',
        description = 'Helper device to limit photon intensity using filter wheel',
        tangodevice = tango_base + 'wheel1/limiter',
    ),
    limiter_det2 = device('nicos.devices.tango.AnalogOutput',
        description = 'Helper device to limit photon intensity using filter wheel',
        tangodevice = tango_base + 'wheel2/limiter',
    ),
    #virtual_limiter = device('nicos.devices.generic.ManualMove',
    #    description = 'Virtual limiter to use if filter wheel is not present',
    #    unit = 'kHz',
    #    abslimits = (0, 1e6),
    #),
    dlssink = device('nicos_mlz.kws1.devices.dls.DLSFileSink',
        detectors = ['DLSdet'],
    ),
    DLSdet = device('nicos_mlz.kws1.devices.dls.DLSDetector',
        description = 'DLS detector',
        cards = ['card1', 'card2'],
        limiters = ['limiter_laser', 'limiter_det1', 'limiter_det2'],
        shutter = 'shutter',
        lasersel = 'laser',
        wavelengthmap = {'red': 650, 'green': 550},
    ),
    mirror_pos = device('nicos.devices.tango.Motor',
        description = 'Mirror table to select laser',
        tangodevice = tango_base + 'mirror/table',
        fmtstr = '%.2f',
        precision = 0.1,
    ),
    # The laser switcher maps named lasers to mirror-table positions.
    laser = device('nicos.devices.generic.Switcher',
        description = 'Selected laser from mirror table',
        moveable = 'mirror_pos',
        mapping = {'red': 0, 'green': 21.9},
        precision = 0.1,
    ),
)
startupcode = '''
SetDetectors(DLSdet)
'''
extended = dict(
    representative = 'DLSdet',
)
| 33.928571 | 83 | 0.609624 |
description = 'Dynamic light scattering setup'
group = 'basic'
includes = []
sysconfig = dict(
datasinks = ['dlssink'],
)
tango_base = 'tango://localhost:10000/dls/'
devices = dict(
card1 = device('nicos_mlz.kws1.devices.dls.DLSCard',
description = 'DLS correlator card 1',
tangodevice = tango_base + 'corr1/spectra',
angles = [75, 0],
wheels = ['wheel_laser'],
unit = '',
),
card2 = device('nicos_mlz.kws1.devices.dls.DLSCard',
description = 'DLS correlator card 2',
tangodevice = tango_base + 'corr2/spectra',
angles = [115, 100],
wheels = ['wheel_laser', 'wheel_det1', 'wheel_det2'],
unit = '',
),
shutter = device('nicos.devices.tango.NamedDigitalOutput',
description = 'Laser shutter for DLS',
tangodevice = tango_base + 'shutter/ctrl',
mapping = {'open': 1, 'closed': 0},
),
wheel_laser = device('nicos.devices.tango.DigitalOutput',
description = 'Filter wheel in front of laser',
tangodevice = tango_base + 'wheel0/pos',
fmtstr = '%d',
),
wheel_det1 = device('nicos.devices.tango.DigitalOutput',
description = 'Filter wheel in front of detector 1',
tangodevice = tango_base + 'wheel1/pos',
fmtstr = '%d',
),
wheel_det2 = device('nicos.devices.tango.DigitalOutput',
description = 'Filter wheel in front of detector 2',
tangodevice = tango_base + 'wheel2/pos',
fmtstr = '%d',
),
limiter_laser = device('nicos.devices.tango.AnalogOutput',
description = 'Helper device to limit photon intensity using filter wheel',
tangodevice = tango_base + 'wheel0/limiter',
),
limiter_det1 = device('nicos.devices.tango.AnalogOutput',
description = 'Helper device to limit photon intensity using filter wheel',
tangodevice = tango_base + 'wheel1/limiter',
),
limiter_det2 = device('nicos.devices.tango.AnalogOutput',
description = 'Helper device to limit photon intensity using filter wheel',
tangodevice = tango_base + 'wheel2/limiter',
),
dlssink = device('nicos_mlz.kws1.devices.dls.DLSFileSink',
detectors = ['DLSdet'],
),
DLSdet = device('nicos_mlz.kws1.devices.dls.DLSDetector',
description = 'DLS detector',
cards = ['card1', 'card2'],
limiters = ['limiter_laser', 'limiter_det1', 'limiter_det2'],
shutter = 'shutter',
lasersel = 'laser',
wavelengthmap = {'red': 650, 'green': 550},
),
mirror_pos = device('nicos.devices.tango.Motor',
description = 'Mirror table to select laser',
tangodevice = tango_base + 'mirror/table',
fmtstr = '%.2f',
precision = 0.1,
),
laser = device('nicos.devices.generic.Switcher',
description = 'Selected laser from mirror table',
moveable = 'mirror_pos',
mapping = {'red': 0, 'green': 21.9},
precision = 0.1,
),
)
startupcode = '''
SetDetectors(DLSdet)
'''
extended = dict(
representative = 'DLSdet',
)
| true | true |
f7fde46912c623b128f20f9d652dc37ac047f57b | 4,660 | py | Python | utils/pysot/datasets/vot.py | ywang-37/EnhancedSiamShipTracking | 0b25cf02b6088268a6c374cb20a7f0355bc65b2e | [
"Apache-2.0"
] | 3 | 2022-03-03T09:14:50.000Z | 2022-03-28T13:46:29.000Z | utils/pysot/datasets/vot.py | ywang-37/EnhancedSiamShipTracking | 0b25cf02b6088268a6c374cb20a7f0355bc65b2e | [
"Apache-2.0"
] | null | null | null | utils/pysot/datasets/vot.py | ywang-37/EnhancedSiamShipTracking | 0b25cf02b6088268a6c374cb20a7f0355bc65b2e | [
"Apache-2.0"
] | null | null | null | import os
import json
import numpy as np
from glob import glob
from tqdm import tqdm
from .dataset import Dataset
from .video import Video
class VOTVideo(Video):
    """A single VOT sequence with per-frame challenge tags.

    Args:
        name: video name
        root: dataset root
        video_dir: video directory
        init_rect: init rectangle
        img_names: image names
        gt_rect: groundtruth rectangle
        camera_motion: camera motion tag (0/1 per frame)
        illum_change: illum change tag (0/1 per frame)
        motion_change: motion change tag (0/1 per frame)
        size_change: size change tag (0/1 per frame)
        occlusion: occlusion tag (0/1 per frame)
        width: frame width in pixels
        height: frame height in pixels
    """
    def __init__(self, name, root, video_dir, init_rect, img_names, gt_rect,
            camera_motion, illum_change, motion_change, size_change, occlusion, width, height):
        super(VOTVideo, self).__init__(name, root, video_dir, init_rect, img_names, gt_rect, None)
        # 'all' selects every frame; the other tags mark frames where the
        # corresponding nuisance occurs.
        self.tags= {'all': [1] * len(gt_rect)}
        self.tags['camera_motion'] = camera_motion
        self.tags['illum_change'] = illum_change
        self.tags['motion_change'] = motion_change
        self.tags['size_change'] = size_change
        self.tags['occlusion'] = occlusion
        self.width = width
        self.height = height
        # empty tag: frames carrying none of the (non-empty) tags above.
        # NOTE(review): assumes all non-empty tag lists share one length --
        # confirm against the annotation files.
        all_tag = [v for k, v in self.tags.items() if len(v) > 0 ]
        self.tags['empty'] = np.all(1 - np.array(all_tag), axis=1).astype(np.int32).tolist()
        self.tag_names = list(self.tags.keys())
    def select_tag(self, tag, start=0, end=0):
        """Return the tag's 0/1 frame mask, sliced [start:end] except 'empty'."""
        if tag == 'empty':
            return self.tags[tag]
        return self.tags[tag][start:end]
    def load_tracker(self, path, tracker_names=None, store=True):
        """Load tracker result trajectories for this video.

        Args:
            path(str): path to result directory (treated as a glob pattern
                when tracker_names is not given)
            tracker_name(list): name of tracker (a single string is also
                accepted)
            store: if True, cache results on self.pred_trajs; otherwise
                return the trajectories of the last tracker processed
        """
        if not tracker_names:
            tracker_names = [x.split('/')[-1] for x in glob(path)
                             if os.path.isdir(x)]
        if isinstance(tracker_names, str):
            tracker_names = [tracker_names]
        for name in tracker_names:
            # VOT baseline protocol stores up to 15 repetitions; use all 15
            # when present, otherwise only the first run.
            traj_files = glob(os.path.join(path, name, 'baseline', self.name, '*0*.txt'))
            if len(traj_files) == 15:
                traj_files = traj_files
            else:
                traj_files = traj_files[0:1]
            pred_traj = []
            for traj_file in traj_files:
                with open(traj_file, 'r') as f:
                    # One comma-separated rectangle per line.
                    traj = [list(map(float, x.strip().split(',')))
                            for x in f.readlines()]
                    pred_traj.append(traj)
            if store:
                self.pred_trajs[name] = pred_traj
            else:
                return pred_traj
class VOTDataset(Dataset):
    """VOT benchmark dataset.

    Args:
        name: dataset name, should be 'VOT2018', 'VOT2016'
        dataset_root: dataset root directory holding <name>.json
    """
    def __init__(self, name, dataset_root):
        super(VOTDataset, self).__init__(name, dataset_root)
        try:
            with open(os.path.join(dataset_root, name+'.json'), 'r') as f:
                meta_data = json.load(f)
        except (EnvironmentError, ValueError):
            # EnvironmentError (== IOError/OSError): annotation file missing
            # or unreadable; ValueError: malformed JSON (json.JSONDecodeError
            # subclasses it).  The previous bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit, masking Ctrl-C during startup.
            download_str = '# download json file for eval toolkit\n'+\
                           'cd $SiamMask/data\n'+\
                           'wget http://www.robots.ox.ac.uk/~qwang/VOT2016.json\n'+\
                           'wget http://www.robots.ox.ac.uk/~qwang/VOT2018.json'
            print(download_str)
            exit()
        # load videos: one VOTVideo per annotated sequence, with a progress
        # bar showing the sequence currently being parsed.
        pbar = tqdm(meta_data.keys(), desc='loading '+name, ncols=100)
        self.videos = {}
        for video in pbar:
            pbar.set_postfix_str(video)
            self.videos[video] = VOTVideo(video,
                                          dataset_root,
                                          meta_data[video]['video_dir'],
                                          meta_data[video]['init_rect'],
                                          meta_data[video]['img_names'],
                                          meta_data[video]['gt_rect'],
                                          meta_data[video]['camera_motion'],
                                          meta_data[video]['illum_change'],
                                          meta_data[video]['motion_change'],
                                          meta_data[video]['size_change'],
                                          meta_data[video]['occlusion'],
                                          meta_data[video]['width'],
                                          meta_data[video]['height'])
        # Per-frame challenge tags available for attribute-wise evaluation.
        self.tags = ['all', 'camera_motion', 'illum_change', 'motion_change',
                     'size_change', 'occlusion', 'empty']
| 38.512397 | 98 | 0.516094 | import os
import json
import numpy as np
from glob import glob
from tqdm import tqdm
from .dataset import Dataset
from .video import Video
class VOTVideo(Video):
def __init__(self, name, root, video_dir, init_rect, img_names, gt_rect,
camera_motion, illum_change, motion_change, size_change, occlusion, width, height):
super(VOTVideo, self).__init__(name, root, video_dir, init_rect, img_names, gt_rect, None)
self.tags= {'all': [1] * len(gt_rect)}
self.tags['camera_motion'] = camera_motion
self.tags['illum_change'] = illum_change
self.tags['motion_change'] = motion_change
self.tags['size_change'] = size_change
self.tags['occlusion'] = occlusion
self.width = width
self.height = height
all_tag = [v for k, v in self.tags.items() if len(v) > 0 ]
self.tags['empty'] = np.all(1 - np.array(all_tag), axis=1).astype(np.int32).tolist()
self.tag_names = list(self.tags.keys())
def select_tag(self, tag, start=0, end=0):
if tag == 'empty':
return self.tags[tag]
return self.tags[tag][start:end]
def load_tracker(self, path, tracker_names=None, store=True):
if not tracker_names:
tracker_names = [x.split('/')[-1] for x in glob(path)
if os.path.isdir(x)]
if isinstance(tracker_names, str):
tracker_names = [tracker_names]
for name in tracker_names:
traj_files = glob(os.path.join(path, name, 'baseline', self.name, '*0*.txt'))
if len(traj_files) == 15:
traj_files = traj_files
else:
traj_files = traj_files[0:1]
pred_traj = []
for traj_file in traj_files:
with open(traj_file, 'r') as f:
traj = [list(map(float, x.strip().split(',')))
for x in f.readlines()]
pred_traj.append(traj)
if store:
self.pred_trajs[name] = pred_traj
else:
return pred_traj
class VOTDataset(Dataset):
def __init__(self, name, dataset_root):
super(VOTDataset, self).__init__(name, dataset_root)
try:
with open(os.path.join(dataset_root, name+'.json'), 'r') as f:
meta_data = json.load(f)
except:
download_str = '# download json file for eval toolkit\n'+\
'cd $SiamMask/data\n'+\
'wget http://www.robots.ox.ac.uk/~qwang/VOT2016.json\n'+\
'wget http://www.robots.ox.ac.uk/~qwang/VOT2018.json'
print(download_str)
exit()
pbar = tqdm(meta_data.keys(), desc='loading '+name, ncols=100)
self.videos = {}
for video in pbar:
pbar.set_postfix_str(video)
self.videos[video] = VOTVideo(video,
dataset_root,
meta_data[video]['video_dir'],
meta_data[video]['init_rect'],
meta_data[video]['img_names'],
meta_data[video]['gt_rect'],
meta_data[video]['camera_motion'],
meta_data[video]['illum_change'],
meta_data[video]['motion_change'],
meta_data[video]['size_change'],
meta_data[video]['occlusion'],
meta_data[video]['width'],
meta_data[video]['height'])
self.tags = ['all', 'camera_motion', 'illum_change', 'motion_change',
'size_change', 'occlusion', 'empty']
| true | true |
f7fde4866974141f0e894bdffffd34c63e8823ff | 1,390 | py | Python | server/alembic/versions/3b121603bb7c_add_user_table.py | josenava/meal-calendar | d5182f9b9ee30c02efc8bd22e79bb7a53e778919 | [
"MIT"
] | null | null | null | server/alembic/versions/3b121603bb7c_add_user_table.py | josenava/meal-calendar | d5182f9b9ee30c02efc8bd22e79bb7a53e778919 | [
"MIT"
] | 5 | 2020-07-24T14:45:31.000Z | 2022-02-27T09:49:55.000Z | server/alembic/versions/3b121603bb7c_add_user_table.py | josenava/meal-calendar | d5182f9b9ee30c02efc8bd22e79bb7a53e778919 | [
"MIT"
] | null | null | null | """Add user table
Revision ID: 3b121603bb7c
Revises:
Create Date: 2020-07-20 16:52:16.928316
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence
# revision identifiers, used by Alembic.
revision = '3b121603bb7c'
down_revision = None  # first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Create the `users` table, its id sequence, and email/id indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The sequence must exist before the table references it via nextval().
    op.execute(CreateSequence(Sequence("user_id_seq")))
    op.create_table('users',
    sa.Column('id', sa.Integer(), server_default=sa.text("nextval('user_id_seq')"), nullable=False),
    sa.Column('email', sa.String(), nullable=True),
    sa.Column('hashed_password', sa.String(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Unique index enforces one account per email address.
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop the `users` indexes, table, and id sequence (reverse of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_id'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
    # The sequence is dropped last since the table's default referenced it.
    op.execute(DropSequence(Sequence("user_id_seq")))
    # ### end Alembic commands ###
| 32.325581 | 100 | 0.690647 | from alembic import op
import sqlalchemy as sa
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence
revision = '3b121603bb7c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
alse),
sa.Column('email', sa.String(), nullable=True),
sa.Column('hashed_password', sa.String(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
| true | true |
f7fde4f0aa746574dd27a0c9722065bb924d28a3 | 16,227 | py | Python | research/tcn/dataset/webcam.py | SimiaCryptus/models | c652a23a650070b71e286f1ded93726670161940 | [
"Apache-2.0"
] | null | null | null | research/tcn/dataset/webcam.py | SimiaCryptus/models | c652a23a650070b71e286f1ded93726670161940 | [
"Apache-2.0"
] | null | null | null | research/tcn/dataset/webcam.py | SimiaCryptus/models | c652a23a650070b71e286f1ded93726670161940 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Collect images from multiple simultaneous webcams.
Usage:
1. Define some environment variables that describe what you're collecting.
dataset=your_dataset_name
mode=train
num_views=2
viddir=/tmp/tcn/videos
tmp_imagedir=/tmp/tcn/tmp_images
debug_vids=1
2. Run the script.
export DISPLAY=:0.0 && \
root=learning/brain/research/tcn && \
bazel build -c opt --copt=-mavx tcn/webcam && \
bazel-bin/tcn/webcam \
--dataset $dataset \
--mode $mode \
--num_views $num_views \
--tmp_imagedir $tmp_imagedir \
--viddir $viddir \
--debug_vids 1 \
--logtostderr
3. Hit Ctrl-C when done collecting, upon which the script will compile videos
for each view and optionally a debug video concatenating multiple
simultaneous views.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import multiprocessing
import os
import subprocess
import sys
import time
from multiprocessing import Process
import cv2
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import animation # pylint: disable=g-import-not-at-top
import matplotlib.pyplot as plt
import numpy as np
from six.moves import input
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.INFO)

# Command-line flags describing the collection session and output layout.
tf.flags.DEFINE_string('dataset', '', 'Name of the dataset we`re collecting.')
tf.flags.DEFINE_string('mode', '',
                       'What type of data we`re collecting. E.g.:'
                       '`train`,`valid`,`test`, or `demo`')
tf.flags.DEFINE_string('seqname', '',
                       'Name of this sequence. If empty, the script will use'
                       'the name seq_N+1 where seq_N is the latest'
                       'integer-named sequence in the videos directory.')
tf.flags.DEFINE_integer('num_views', 2,
                        'Number of webcams.')
tf.flags.DEFINE_string('tmp_imagedir', '/tmp/tcn/data',
                       'Temporary outdir to write images.')
tf.flags.DEFINE_string('viddir', '/tmp/tcn/videos',
                       'Base directory to write debug videos.')
tf.flags.DEFINE_boolean('debug_vids', True,
                        'Whether to generate debug vids with multiple'
                        'concatenated views.')
tf.flags.DEFINE_string('debug_lhs_view', '0',
                       'Which viewpoint to use for the lhs video.')
tf.flags.DEFINE_string('debug_rhs_view', '1',
                       'Which viewpoint to use for the rhs video.')
# Raw capture resolution shared by all webcams.
tf.flags.DEFINE_integer('height', 1080, 'Raw input height.')
tf.flags.DEFINE_integer('width', 1920, 'Raw input width.')
tf.flags.DEFINE_string('webcam_ports', None,
                       'Comma-separated list of each webcam usb port.')
FLAGS = tf.app.flags.FLAGS
class ImageQueue(object):
  """A process-safe, depth-one buffer holding a stream's most recent image.

  Equivalent to a multiprocessing-friendly collections.deque(maxlen=1):
  appending while an image is already queued silently drops the stale one.
  """

  def __init__(self):
    self.lock = multiprocessing.Lock()
    self._queue = multiprocessing.Queue(maxsize=1)

  def append(self, data):
    """Stores `data`, evicting any still-queued stale image first."""
    self.lock.acquire()
    try:
      if self._queue.full():
        self._queue.get()  # Evict the stale image so put() cannot block.
      self._queue.put(data)
    finally:
      self.lock.release()

  def get(self):
    """Removes and returns the queued image.

    NOTE(review): blocks while holding the lock when the queue is empty,
    which can stall appenders; callers are expected to check empty() first
    (as WebcamViewer and reconcile do) -- confirm before relying on get()
    alone.
    """
    self.lock.acquire()
    try:
      return self._queue.get()
    finally:
      self.lock.release()

  def empty(self):
    """Returns True if no image is currently queued."""
    return self._queue.empty()

  def close(self):
    """Closes the underlying multiprocessing queue."""
    return self._queue.close()
class WebcamViewer(object):
  """A class which displays a live stream from the webcams."""

  def __init__(self, display_queues):
    """Create a WebcamViewer instance.

    Args:
      display_queues: A list of `ImageQueue`s, one per webcam view.
    """
    self.height = FLAGS.height
    self.width = FLAGS.width
    self.queues = display_queues

  def _get_next_images(self):
    """Gets the next image to display."""
    # Wait for one image per view.
    not_found = True
    while not_found:
      if True in [q.empty() for q in self.queues]:
        # At least one image queue is empty; wait.
        continue
      else:
        # Retrieve the images.
        latest = [q.get() for q in self.queues]
        # Concatenate all views horizontally into one wide frame.
        combined = np.concatenate(latest, axis=1)
        not_found = False
    return combined

  def run(self):
    """Displays the Kcam live stream in a window.

    This function blocks until the window is closed.
    """
    fig, rgb_axis = plt.subplots()
    image_rows = self.height
    image_cols = self.width * FLAGS.num_views
    initial_image = np.zeros((image_rows, image_cols, 3))
    rgb_image = rgb_axis.imshow(initial_image, interpolation='nearest')

    def update_figure(frame_index):
      """Animation function for matplotlib FuncAnimation. Updates the image.

      Args:
        frame_index: The frame number.
      Returns:
        An iterable of matplotlib drawables to clear.
      """
      _ = frame_index
      images = self._get_next_images()
      # Reorder channels BGR -> RGB for matplotlib -- presumably the frames
      # come from OpenCV capture; confirm against the capture process.
      images = images[..., [2, 1, 0]]
      rgb_image.set_array(images)
      return rgb_image,

    # We must keep a reference to this animation in order for it to work.
    unused_animation = animation.FuncAnimation(
        fig, update_figure, interval=50, blit=True)
    mng = plt.get_current_fig_manager()
    mng.resize(*mng.window.maxsize())
    plt.show()
def reconcile(queues, write_queue):
  """Continuously bundles concurrent images from every view queue.

  Spins until every per-view queue holds an image, then snapshots one image
  per view and pushes the bundle onto `write_queue` for the disk writer.

  Args:
    queues: A list of `ImageQueue`s, holding the latest image per webcam.
    write_queue: A multiprocessing.Queue receiving lists of concurrent
      images.
  """
  # Loop forever.
  while True:
    # Busy-wait until every view has a fresh frame available.
    if any(view_queue.empty() for view_queue in queues):
      continue
    # Snapshot one frame per view and hand the bundle to the writer.
    write_queue.put([view_queue.get() for view_queue in queues])
def persist(write_queue, view_dirs):
  """Writes bundles of concurrent view images from `write_queue` to disk.

  Args:
    write_queue: A multiprocessing.Queue of image lists, one image per view.
    view_dirs: Output image directory per view, parallel to the image lists.
  """
  timestep = 0
  while True:
    # Busy-wait until a bundle is available.
    if write_queue.empty():
      continue
    images = write_queue.get()
    # All views in a bundle share the same zero-padded timestep filename so
    # concurrent frames line up across view directories.
    basename = '%s.png' % str(timestep).zfill(10)
    for view_idx, image in enumerate(images):
      cv2.imwrite(os.path.join(view_dirs[view_idx], basename), image)
    # Advance to the next timestep.
    timestep += 1
def get_image(camera):
  """Captures a single frame from `camera` and returns the raw image.

  Args:
    camera: An object with cv2.VideoCapture's read() interface, returning a
      (success, image) pair.

  Returns:
    The captured image (success flag is discarded).
  """
  unused_ret, frame = camera.read()
  return frame
def capture_webcam(camera, display_queue, reconcile_queue):
  """Captures images from one webcam and feeds the display/writer queues.

  Args:
    camera: An open cv2.VideoCapture stream.
    display_queue: An ImageQueue consumed by the live viewer.
    reconcile_queue: An ImageQueue consumed by the multi-view reconciler.
  """
  # Ramp frames let the camera auto-adjust exposure / white balance before
  # any frame is actually kept.
  for ramp_idx in range(60):
    tf.logging.info('Taking ramp image %d.' % ramp_idx)
    get_image(camera)
  frame_count = 0
  start_time = time.time()
  while True:
    # Grab one frame and replace the latest image in both queues.
    frame = get_image(camera)
    display_queue.append(frame)
    reconcile_queue.append(frame)
    frame_count += 1
    now = time.time()
    # Periodically report the effective capture rate.
    if frame_count % 100 == 0:
      tf.logging.info('Collected %s of video, %d frames at ~%.2f fps.' % (
          timer(start_time, now), frame_count, frame_count / (now - start_time)))
def timer(start, end):
  """Formats the elapsed time between `start` and `end` as HH:MM:SS.ss.

  Args:
    start: Start time in seconds (e.g. from time.time()).
    end: End time in seconds.

  Returns:
    A zero-padded 'HH:MM:SS.ss' string.
  """
  elapsed = end - start
  total_minutes, seconds = divmod(elapsed, 60)
  hours, minutes = divmod(int(total_minutes), 60)
  return '{:0>2}:{:0>2}:{:05.2f}'.format(int(hours), int(minutes), seconds)
def display_webcams(display_queues):
  """Animates incoming images in a WebcamViewer; blocks until it is closed.

  Args:
    display_queues: A list of ImageQueue, one per webcam view.
  """
  WebcamViewer(display_queues).run()
def create_vids(view_dirs, seqname):
  """Creates one video per view per sequence, plus an optional debug video.

  Args:
    view_dirs: A list of per-view image directories holding numbered pngs.
    seqname: String name of this sequence, used in output filenames.

  Returns:
    A (vidpaths, debugpath) tuple: the list of encoded per-view video paths,
    and the side-by-side debug video path (None if --debug_vids is off).
  """
  vidbase = os.path.join(FLAGS.viddir, FLAGS.dataset, FLAGS.mode)
  if not os.path.exists(vidbase):
    os.makedirs(vidbase)
  vidpaths = []
  for idx, view_dir in enumerate(view_dirs):
    vidname = os.path.join(vidbase, '%s_view%d.mp4' % (seqname, idx))
    # Encode the numbered pngs of this view into an mp4 via mencoder.
    encode_vid_cmd = r'mencoder mf://%s/*.png \
    -mf fps=29:type=png \
    -ovc lavc -lavcopts vcodec=mpeg4:mbd=2:trell \
    -oac copy -o %s' % (view_dir, vidname)
    os.system(encode_vid_cmd)
    vidpaths.append(vidname)
  debugpath = None
  if FLAGS.debug_vids:
    # debug_lhs_view / debug_rhs_view are *string* flags; cast before
    # indexing (indexing a list with a str raised TypeError previously).
    lhs = vidpaths[int(FLAGS.debug_lhs_view)]
    rhs = vidpaths[int(FLAGS.debug_rhs_view)]
    debug_base = os.path.join('%s_debug' % FLAGS.viddir, FLAGS.dataset,
                              FLAGS.mode)
    if not os.path.exists(debug_base):
      os.makedirs(debug_base)
    debugpath = '%s/%s.mp4' % (debug_base, seqname)
    # Pad the lhs video to double width, overlay the rhs on the right half.
    os.system(r"avconv \
    -i %s \
    -i %s \
    -filter_complex '[0:v]pad=iw*2:ih[int];[int][1:v]overlay=W/2:0[vid]' \
    -map [vid] \
    -c:v libx264 \
    -crf 23 \
    -preset veryfast \
    %s" % (lhs, rhs, debugpath))
  return vidpaths, debugpath
def setup_paths():
  """Sets up the necessary paths to collect videos.

  Returns:
    A (view_dirs, vid_paths, debug_path) tuple: per-view temp image dirs,
    per-view output video paths, and the debug video path (or None).
  """
  assert FLAGS.dataset
  assert FLAGS.mode
  assert FLAGS.num_views
  # Directory for the final per-sequence images used to build videos.
  tmp_imagedir = os.path.join(FLAGS.tmp_imagedir, FLAGS.dataset, FLAGS.mode)
  if not os.path.exists(tmp_imagedir):
    os.makedirs(tmp_imagedir)
  # Base directory holding all sequence videos.
  vidbase = os.path.join(FLAGS.viddir, FLAGS.dataset, FLAGS.mode)
  if not os.path.exists(vidbase):
    os.makedirs(vidbase)
  # One directory per concurrent view, plus the chosen sequence name.
  view_dirs, seqname = get_view_dirs(vidbase, tmp_imagedir)
  # One output video path per view.
  vid_paths = [
      os.path.join(vidbase, '%s_view%d.mp4' % (seqname, view_idx))
      for view_idx in range(len(view_dirs))
  ]
  # Optionally build the debug video path.
  debug_path = None
  if FLAGS.debug_vids:
    debug_base = os.path.join('%s_debug' % FLAGS.viddir, FLAGS.dataset,
                              FLAGS.mode)
    if not os.path.exists(debug_base):
      os.makedirs(debug_base)
    debug_path = '%s/%s.mp4' % (debug_base, seqname)
  return view_dirs, vid_paths, debug_path
def get_view_dirs(vidbase, tmp_imagedir):
  """Creates and returns one temp image directory per webcam view.

  Args:
    vidbase: Directory of previously encoded sequence videos, used to pick
      the next integer sequence name.
    tmp_imagedir: Base directory under which per-view dirs are created.

  Returns:
    A (view_dirs, seqname) tuple.
  """
  # Choose a sequence name: the flag if given, else latest integer + 1.
  if FLAGS.seqname:
    seqname = FLAGS.seqname
  else:
    existing = os.listdir(vidbase)
    if not existing:
      # No videos yet: this is the first sequence.
      seqname = '0'
    else:
      # Filenames look like '<seq>_view<N>.mp4'; take the max seq and add 1.
      latest_seq = max(int(name.split('_')[0]) for name in existing)
      seqname = str(latest_seq + 1)
    tf.logging.info('No seqname specified, using: %s' % seqname)
  view_dirs = [
      os.path.join(tmp_imagedir, '%s_view%d' % (seqname, view_idx))
      for view_idx in range(FLAGS.num_views)
  ]
  for view_dir in view_dirs:
    if not os.path.exists(view_dir):
      os.makedirs(view_dir)
  return view_dirs, seqname
def get_cameras():
  """Opens cameras using cv2, ensures they can take images.

  Returns:
    A list of opened cv2.VideoCapture objects, one per port.

  Raises:
    ValueError: If the cameras cannot be opened (even after killing hanging
      cv2 processes).
  """
  # Materialize the ports as a list: under python 3, map() returns a
  # one-shot iterator, and `ports` is iterated a second time below when
  # recapturing the webcams (previously that second pass saw no ports).
  if FLAGS.webcam_ports:
    ports = [int(p) for p in FLAGS.webcam_ports.split(',')]
  else:
    ports = list(range(FLAGS.num_views))
  cameras = [cv2.VideoCapture(i) for i in ports]
  if not all([i.isOpened() for i in cameras]):
    try:
      # Try to find and kill hanging cv2 process_ids.
      output = subprocess.check_output(['lsof -t /dev/video*'], shell=True)
      tf.logging.info('Found hanging cv2 process_ids: \n')
      tf.logging.info(output)
      tf.logging.info('Killing hanging processes...')
      for process_id in output.split('\n')[:-1]:
        subprocess.call(['kill %s' % process_id], shell=True)
      time.sleep(3)
      # Recapture webcams.
      cameras = [cv2.VideoCapture(i) for i in ports]
    except subprocess.CalledProcessError:
      raise ValueError(
          'Cannot connect to cameras. Try running: \n'
          'ls -ltrh /dev/video* \n '
          'to see which ports your webcams are connected to. Then hand those '
          'ports as a comma-separated list to --webcam_ports, e.g. '
          '--webcam_ports 0,1')
  # Verify each camera is able to capture images. Use a list comprehension
  # (not a lazy py3 map) so the check actually executes.
  ims = [get_image(camera) for camera in cameras]
  assert False not in [i is not None for i in ims]
  return cameras
def launch_images_to_videos(view_dirs, vid_paths, debug_path):
  """Launches images_to_videos.py asynchronously to encode final videos.

  Args:
    view_dirs: Per-view image directories to encode.
    vid_paths: Per-view output video paths.
    debug_path: Debug video output path (may be None).
  """
  script = 'learning/brain/research/tcn/dataset/images_to_videos.py'
  # Build a single backgrounded shell command string.
  cmd = 'python %s ' % script
  cmd += '--view_dirs %s ' % ','.join(view_dirs)
  cmd += '--vid_paths %s ' % ','.join(vid_paths)
  cmd += '--debug_path %s ' % debug_path
  cmd += '--debug_lhs_view %s ' % FLAGS.debug_lhs_view
  cmd += '--debug_rhs_view %s ' % FLAGS.debug_rhs_view
  cmd += ' & '
  # Call images_to_videos asynchronously, discarding its output.
  fnull = open(os.devnull, 'w')
  subprocess.Popen([cmd], stdout=fnull, stderr=subprocess.STDOUT, shell=True)
  for vid_path in vid_paths:
    tf.logging.info('Writing final video to: %s' % vid_path)
  if debug_path:
    tf.logging.info('Writing debug video to: %s' % debug_path)
def main(_):
  """Captures multi-view webcam streams until Ctrl-C, then encodes videos."""
  # Initialize the camera capture objects.
  cameras = get_cameras()
  # Get one output directory per view.
  view_dirs, vid_paths, debug_path = setup_paths()
  # Define the queue lists before the try block: the KeyboardInterrupt
  # handler below closes them, and an interrupt can arrive while blocked in
  # input() before they are assigned (previously a NameError).
  display_queues = []
  reconcile_queues = []
  try:
    # Wait for user input.
    try:
      tf.logging.info('About to write to:')
      for v in view_dirs:
        tf.logging.info(v)
      input('Press Enter to continue...')
    except SyntaxError:
      pass
    # Create a queue per view for displaying and saving images.
    display_queues = [ImageQueue() for _ in range(FLAGS.num_views)]
    reconcile_queues = [ImageQueue() for _ in range(FLAGS.num_views)]
    # Create a queue for collecting all tuples of multi-view images to write
    # to disk.
    write_queue = multiprocessing.Queue()
    processes = []
    # Process displaying collected images in real time.
    processes.append(Process(target=display_webcams, args=(display_queues,)))
    # Process collecting the latest simultaneous images from each view.
    processes.append(Process(
        target=reconcile, args=(reconcile_queues, write_queue,)))
    # Process writing reconciled image tuples to disk.
    processes.append(Process(
        target=persist, args=(write_queue, view_dirs,)))
    # One capture process per webcam.
    for (cam, dq, rq) in zip(cameras, display_queues, reconcile_queues):
      processes.append(Process(
          target=capture_webcam, args=(cam, dq, rq,)))
    for p in processes:
      p.start()
    for p in processes:
      p.join()
  except KeyboardInterrupt:
    # Close the queues.
    for q in display_queues + reconcile_queues:
      q.close()
    # Release the cameras.
    for cam in cameras:
      cam.release()
    # Launch images_to_videos script asynchronously.
    launch_images_to_videos(view_dirs, vid_paths, debug_path)
    try:
      sys.exit(0)
    except SystemExit:
      os._exit(0)  # pylint: disable=protected-access
if __name__ == '__main__':
  # tf.app.run() parses the flags defined above, then invokes main().
  tf.app.run()
| 32.848178 | 80 | 0.668824 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import multiprocessing
import os
import subprocess
import sys
import time
from multiprocessing import Process
import cv2
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import animation
import matplotlib.pyplot as plt
import numpy as np
from six.moves import input
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.INFO)
tf.flags.DEFINE_string('dataset', '', 'Name of the dataset we`re collecting.')
tf.flags.DEFINE_string('mode', '',
'What type of data we`re collecting. E.g.:'
'`train`,`valid`,`test`, or `demo`')
tf.flags.DEFINE_string('seqname', '',
'Name of this sequence. If empty, the script will use'
'the name seq_N+1 where seq_N is the latest'
'integer-named sequence in the videos directory.')
tf.flags.DEFINE_integer('num_views', 2,
'Number of webcams.')
tf.flags.DEFINE_string('tmp_imagedir', '/tmp/tcn/data',
'Temporary outdir to write images.')
tf.flags.DEFINE_string('viddir', '/tmp/tcn/videos',
'Base directory to write debug videos.')
tf.flags.DEFINE_boolean('debug_vids', True,
'Whether to generate debug vids with multiple'
'concatenated views.')
tf.flags.DEFINE_string('debug_lhs_view', '0',
'Which viewpoint to use for the lhs video.')
tf.flags.DEFINE_string('debug_rhs_view', '1',
'Which viewpoint to use for the rhs video.')
tf.flags.DEFINE_integer('height', 1080, 'Raw input height.')
tf.flags.DEFINE_integer('width', 1920, 'Raw input width.')
tf.flags.DEFINE_string('webcam_ports', None,
'Comma-separated list of each webcam usb port.')
FLAGS = tf.app.flags.FLAGS
class ImageQueue(object):
def __init__(self):
self.lock = multiprocessing.Lock()
self._queue = multiprocessing.Queue(maxsize=1)
def append(self, data):
with self.lock:
if self._queue.full():
_ = self._queue.get()
self._queue.put(data)
def get(self):
with self.lock:
return self._queue.get()
def empty(self):
return self._queue.empty()
def close(self):
return self._queue.close()
class WebcamViewer(object):
def __init__(self, display_queues):
self.height = FLAGS.height
self.width = FLAGS.width
self.queues = display_queues
def _get_next_images(self):
not_found = True
while not_found:
if True in [q.empty() for q in self.queues]:
continue
else:
latest = [q.get() for q in self.queues]
combined = np.concatenate(latest, axis=1)
not_found = False
return combined
def run(self):
fig, rgb_axis = plt.subplots()
image_rows = self.height
image_cols = self.width * FLAGS.num_views
initial_image = np.zeros((image_rows, image_cols, 3))
rgb_image = rgb_axis.imshow(initial_image, interpolation='nearest')
def update_figure(frame_index):
_ = frame_index
images = self._get_next_images()
images = images[..., [2, 1, 0]]
rgb_image.set_array(images)
return rgb_image,
unused_animation = animation.FuncAnimation(
fig, update_figure, interval=50, blit=True)
mng = plt.get_current_fig_manager()
mng.resize(*mng.window.maxsize())
plt.show()
def reconcile(queues, write_queue):
while True:
if True in [q.empty() for q in queues]:
continue
else:
latest = [q.get() for q in queues]
# Copy the list of all concurrent images to the write queue.
write_queue.put(latest)
def persist(write_queue, view_dirs):
timestep = 0
while True:
# Wait till there is work in the queue.
if write_queue.empty():
continue
# Get a list of concurrent images to write to disk.
view_ims = write_queue.get()
for view_idx, image in enumerate(view_ims):
view_base = view_dirs[view_idx]
# Assign all concurrent view images the same sequence timestep.
fname = os.path.join(view_base, '%s.png' % str(timestep).zfill(10))
cv2.imwrite(fname, image)
# Move to the next timestep.
timestep += 1
def get_image(camera):
data = camera.read()
_, im = data
return im
def capture_webcam(camera, display_queue, reconcile_queue):
# Take some ramp images to allow cams to adjust for brightness etc.
for i in range(60):
tf.logging.info('Taking ramp image %d.' % i)
get_image(camera)
cnt = 0
start = time.time()
while True:
# Get images for all cameras.
im = get_image(camera)
# Replace the current image in the display and reconcile queues.
display_queue.append(im)
reconcile_queue.append(im)
cnt += 1
current = time.time()
if cnt % 100 == 0:
tf.logging.info('Collected %s of video, %d frames at ~%.2f fps.' % (
timer(start, current), cnt, cnt/(current-start)))
def timer(start, end):
hours, rem = divmod(end-start, 3600)
minutes, seconds = divmod(rem, 60)
return '{:0>2}:{:0>2}:{:05.2f}'.format(int(hours), int(minutes), seconds)
def display_webcams(display_queues):
viewer = WebcamViewer(display_queues)
viewer.run()
def create_vids(view_dirs, seqname):
vidbase = os.path.join(FLAGS.viddir, FLAGS.dataset, FLAGS.mode)
if not os.path.exists(vidbase):
os.makedirs(vidbase)
vidpaths = []
for idx, view_dir in enumerate(view_dirs):
vidname = os.path.join(vidbase, '%s_view%d.mp4' % (seqname, idx))
encode_vid_cmd = r'mencoder mf://%s/*.png \
-mf fps=29:type=png \
-ovc lavc -lavcopts vcodec=mpeg4:mbd=2:trell \
-oac copy -o %s' % (view_dir, vidname)
os.system(encode_vid_cmd)
vidpaths.append(vidname)
debugpath = None
if FLAGS.debug_vids:
lhs = vidpaths[FLAGS.debug_lhs_view]
rhs = vidpaths[FLAGS.debug_rhs_view]
debug_base = os.path.join('%s_debug' % FLAGS.viddir, FLAGS.dataset,
FLAGS.mode)
if not os.path.exists(debug_base):
os.makedirs(debug_base)
debugpath = '%s/%s.mp4' % (debug_base, seqname)
os.system(r"avconv \
-i %s \
-i %s \
-filter_complex '[0:v]pad=iw*2:ih[int];[int][1:v]overlay=W/2:0[vid]' \
-map [vid] \
-c:v libx264 \
-crf 23 \
-preset veryfast \
%s" % (lhs, rhs, debugpath))
return vidpaths, debugpath
def setup_paths():
assert FLAGS.dataset
assert FLAGS.mode
assert FLAGS.num_views
# Setup directory for final images used to create videos for this sequence.
tmp_imagedir = os.path.join(FLAGS.tmp_imagedir, FLAGS.dataset, FLAGS.mode)
if not os.path.exists(tmp_imagedir):
os.makedirs(tmp_imagedir)
# Create a base directory to hold all sequence videos if it doesn't exist.
vidbase = os.path.join(FLAGS.viddir, FLAGS.dataset, FLAGS.mode)
if not os.path.exists(vidbase):
os.makedirs(vidbase)
view_dirs, seqname = get_view_dirs(vidbase, tmp_imagedir)
vid_paths = []
for idx, _ in enumerate(view_dirs):
vid_path = os.path.join(vidbase, '%s_view%d.mp4' % (seqname, idx))
vid_paths.append(vid_path)
# Optionally build paths to debug_videos.
debug_path = None
if FLAGS.debug_vids:
debug_base = os.path.join('%s_debug' % FLAGS.viddir, FLAGS.dataset,
FLAGS.mode)
if not os.path.exists(debug_base):
os.makedirs(debug_base)
debug_path = '%s/%s.mp4' % (debug_base, seqname)
return view_dirs, vid_paths, debug_path
def get_view_dirs(vidbase, tmp_imagedir):
# Create and append a sequence name.
if FLAGS.seqname:
seqname = FLAGS.seqname
else:
# If there's no video directory, this is the first sequence.
if not os.listdir(vidbase):
seqname = '0'
else:
seq_names = [i.split('_')[0] for i in os.listdir(vidbase)]
latest_seq = sorted(map(int, seq_names), reverse=True)[0]
seqname = str(latest_seq+1)
tf.logging.info('No seqname specified, using: %s' % seqname)
view_dirs = [os.path.join(
tmp_imagedir, '%s_view%d' % (seqname, v)) for v in range(FLAGS.num_views)]
for d in view_dirs:
if not os.path.exists(d):
os.makedirs(d)
return view_dirs, seqname
def get_cameras():
if FLAGS.webcam_ports:
ports = map(int, FLAGS.webcam_ports.split(','))
else:
ports = range(FLAGS.num_views)
cameras = [cv2.VideoCapture(i) for i in ports]
if not all([i.isOpened() for i in cameras]):
try:
output = subprocess.check_output(['lsof -t /dev/video*'], shell=True)
tf.logging.info('Found hanging cv2 process_ids: \n')
tf.logging.info(output)
tf.logging.info('Killing hanging processes...')
for process_id in output.split('\n')[:-1]:
subprocess.call(['kill %s' % process_id], shell=True)
time.sleep(3)
cameras = [cv2.VideoCapture(i) for i in ports]
except subprocess.CalledProcessError:
raise ValueError(
'Cannot connect to cameras. Try running: \n'
'ls -ltrh /dev/video* \n '
'to see which ports your webcams are connected to. Then hand those '
'ports as a comma-separated list to --webcam_ports, e.g. '
'--webcam_ports 0,1')
ims = map(get_image, cameras)
assert False not in [i is not None for i in ims]
return cameras
def launch_images_to_videos(view_dirs, vid_paths, debug_path):
f = 'learning/brain/research/tcn/dataset/images_to_videos.py'
cmd = ['python %s ' % f]
cmd += ['--view_dirs %s ' % ','.join(i for i in view_dirs)]
cmd += ['--vid_paths %s ' % ','.join(i for i in vid_paths)]
cmd += ['--debug_path %s ' % debug_path]
cmd += ['--debug_lhs_view %s ' % FLAGS.debug_lhs_view]
cmd += ['--debug_rhs_view %s ' % FLAGS.debug_rhs_view]
cmd += [' & ']
cmd = ''.join(i for i in cmd)
fnull = open(os.devnull, 'w')
subprocess.Popen([cmd], stdout=fnull, stderr=subprocess.STDOUT, shell=True)
for p in vid_paths:
tf.logging.info('Writing final video to: %s' % p)
if debug_path:
tf.logging.info('Writing debug video to: %s' % debug_path)
def main(_):
cameras = get_cameras()
view_dirs, vid_paths, debug_path = setup_paths()
try:
try:
tf.logging.info('About to write to:')
for v in view_dirs:
tf.logging.info(v)
input('Press Enter to continue...')
except SyntaxError:
pass
display_queues = [ImageQueue() for _ in range(FLAGS.num_views)]
reconcile_queues = [ImageQueue() for _ in range(FLAGS.num_views)]
write_queue = multiprocessing.Queue()
processes = []
processes.append(Process(target=display_webcams, args=(display_queues,)))
processes.append(Process(
target=reconcile, args=(reconcile_queues, write_queue,)))
processes.append(Process(
target=persist, args=(write_queue, view_dirs,)))
for (cam, dq, rq) in zip(cameras, display_queues, reconcile_queues):
processes.append(Process(
target=capture_webcam, args=(cam, dq, rq,)))
for p in processes:
p.start()
for p in processes:
p.join()
except KeyboardInterrupt:
for q in display_queues + reconcile_queues:
q.close()
for cam in cameras:
cam.release()
launch_images_to_videos(view_dirs, vid_paths, debug_path)
try:
sys.exit(0)
except SystemExit:
os._exit(0)
if __name__ == '__main__':
tf.app.run()
| true | true |
f7fde73a4819b3f8fbcb8114c24be6c3c3692def | 10,920 | py | Python | tests/unit/stats/test_measure_to_view_map.py | zeako/opencensus-python | 5331d7476edd4af65885295f10d23b7864e5e741 | [
"Apache-2.0"
] | null | null | null | tests/unit/stats/test_measure_to_view_map.py | zeako/opencensus-python | 5331d7476edd4af65885295f10d23b7864e5e741 | [
"Apache-2.0"
] | null | null | null | tests/unit/stats/test_measure_to_view_map.py | zeako/opencensus-python | 5331d7476edd4af65885295f10d23b7864e5e741 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
import logging
from datetime import datetime
from opencensus.stats.view import View
from opencensus.stats.view_data import ViewData
from opencensus.stats.measurement import Measurement
from opencensus.stats.measure import BaseMeasure
from opencensus.stats.measure import MeasureInt
from opencensus.stats import measure_to_view_map as measure_to_view_map_module
class TestMeasureToViewMap(unittest.TestCase):
    """Unit tests for ``measure_to_view_map.MeasureToViewMap``."""
    @staticmethod
    def _get_target_class():
        # The class under test.
        return measure_to_view_map_module.MeasureToViewMap
    def _make_one(self, *args, **kw):
        # Builds an instance of the class under test.
        return self._get_target_class()(*args, **kw)
    def test_constructor(self):
        """A new map starts with empty registries and no exported views."""
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        self.assertEqual({},
                         measure_to_view_map._measure_to_view_data_list_map)
        self.assertEqual({}, measure_to_view_map._registered_views)
        self.assertEqual({}, measure_to_view_map._registered_measures)
        self.assertEqual(set(), measure_to_view_map.exported_views)
    def test_get_view(self):
        """get_view returns data only for registered views with matching data."""
        name = "testView"
        description = "testDescription"
        columns = mock.Mock()
        measure = mock.Mock()
        aggregation = mock.Mock()
        view = View(
            name=name,
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        timestamp = mock.Mock()
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        # No views registered: lookup yields None.
        measure_to_view_map._registered_views = {}
        no_registered_views = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertEqual(None, no_registered_views)
        # Registered view with matching view data: lookup succeeds.
        measure_to_view_map._registered_views = {name: view}
        measure_to_view_map._measure_to_view_data_list_map = {
            view.measure.name:
            [ViewData(view=view, start_time=timestamp, end_time=timestamp)]
        }
        print(measure_to_view_map._measure_to_view_data_list_map)
        view_data = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNotNone(view_data)
        # Registered view but no view data: lookup yields None.
        measure_to_view_map._measure_to_view_data_list_map = {}
        view_data = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNone(view_data)
        # View data exists but belongs to a different view: lookup yields None.
        measure_to_view_map._measure_to_view_data_list_map = {
            view.measure.name: [
                ViewData(
                    view=mock.Mock(), start_time=timestamp, end_time=timestamp)
            ]
        }
        view_data = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNone(view_data)
    def test_filter_exported_views(self):
        """filter_exported_views passes through the full view set."""
        test_view_1_name = "testView1"
        description = "testDescription"
        columns = mock.Mock()
        measure = mock.Mock()
        aggregation = mock.Mock()
        test_view_1 = View(
            name=test_view_1_name,
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        print("test view 1", test_view_1)
        test_view_2_name = "testView2"
        test_view_2 = View(
            name=test_view_2_name,
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        print("test view 2", test_view_2)
        all_the_views = {test_view_1, test_view_2}
        print("all the views", all_the_views)
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        views = measure_to_view_map.filter_exported_views(
            all_views=all_the_views)
        print("filtered views", views)
        self.assertEqual(views, all_the_views)
    def test_register_view(self):
        """register_view records views/measures and ignores conflicting ones."""
        name = "testView"
        description = "testDescription"
        columns = mock.Mock()
        measure = MeasureInt("measure", "description", "1")
        aggregation = mock.Mock()
        view = View(
            name=name,
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        timestamp = mock.Mock()
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        measure_to_view_map._registered_views = {}
        measure_to_view_map._registered_measures = {}
        measure_to_view_map.register_view(view=view, timestamp=timestamp)
        self.assertIsNone(measure_to_view_map.exported_views)
        self.assertEqual(measure_to_view_map._registered_views[view.name],
                         view)
        self.assertEqual(
            measure_to_view_map._registered_measures[measure.name], measure)
        self.assertIsNotNone(
            measure_to_view_map._measure_to_view_data_list_map[
                view.measure.name])
        # Registers a view with an existing measure.
        view2 = View(
            name="testView2",
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        test_with_registered_measures = measure_to_view_map.register_view(
            view=view2, timestamp=timestamp)
        self.assertIsNone(test_with_registered_measures)
        self.assertEqual(
            measure_to_view_map._registered_measures[measure.name], measure)
        # Registers a view with a measure that has the same name as an existing measure,
        # but with different schema. measure2 and view3 should be ignored.
        measure2 = MeasureInt("measure", "another measure", "ms")
        view3 = View(
            name="testView3",
            description=description,
            columns=columns,
            measure=measure2,
            aggregation=aggregation)
        test_with_registered_measures = measure_to_view_map.register_view(
            view=view3, timestamp=timestamp)
        self.assertIsNone(test_with_registered_measures)
        self.assertEqual(
            measure_to_view_map._registered_measures[measure2.name], measure)
        measure_to_view_map._registered_measures = {measure.name: None}
        self.assertIsNone(
            measure_to_view_map._registered_measures.get(measure.name))
        measure_to_view_map.register_view(view=view, timestamp=timestamp)
        # view is already registered, measure will not be registered again.
        self.assertIsNone(
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNotNone(
            measure_to_view_map._measure_to_view_data_list_map[
                view.measure.name])
        measure_to_view_map._registered_views = {name: view}
        test_result_1 = measure_to_view_map.register_view(
            view=view, timestamp=timestamp)
        self.assertIsNone(test_result_1)
        self.assertIsNotNone(
            measure_to_view_map._measure_to_view_data_list_map[
                view.measure.name])
    def test_record(self):
        """record updates view data only for registered measures."""
        measure_name = "test_measure"
        measure_description = "test_description"
        measure = BaseMeasure(
            name=measure_name, description=measure_description)
        view_name = "test_view"
        view_description = "test_description"
        view_columns = ["testTag1", "testColumn2"]
        view_measure = measure
        view_aggregation = mock.Mock()
        view = View(
            name=view_name,
            description=view_description,
            columns=view_columns,
            measure=view_measure,
            aggregation=view_aggregation)
        measure_value = 5
        tags = {"testTag1": "testTag1Value"}
        measurement_map = {measure: measure_value}
        timestamp = mock.Mock()
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        # Unregistered measure: record is a no-op.
        measure_to_view_map._registered_measures = {}
        record = measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertNotEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNone(record)
        # Registered measure but no view data list: still returns None.
        measure_to_view_map._registered_measures = {measure.name: measure}
        measure_to_view_map._measure_to_view_data_list_map = {}
        record = measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNone(record)
        # Registered measure with view data: record reaches the data list.
        measure_to_view_map._measure_to_view_data_list_map = {
            measure.name: [mock.Mock()]
        }
        measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertTrue(
            measure.name in measure_to_view_map._measure_to_view_data_list_map)
        # View data registered under a different key is not touched.
        measure_to_view_map._measure_to_view_data_list_map = {
            "testing": [mock.Mock()]
        }
        measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertTrue(measure.name not in
                        measure_to_view_map._measure_to_view_data_list_map)
        # Calling record on a mock records the call without side effects.
        measure_to_view_map_mock = mock.Mock()
        measure_to_view_map = measure_to_view_map_mock
        measure_to_view_map._registered_measures = {measure.name: measure}
        measure_to_view_map._measure_to_view_data_list_map = mock.Mock()
        measure_to_view_map.record(
            tags=mock.Mock(), stats=mock.Mock(), timestamp=mock.Mock())
        self.assertEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNotNone(measure_to_view_map.view_datas)
        self.assertTrue(measure_to_view_map_mock.record.called)
        # Empty measurement map: record returns None.
        tags = {"testTag1": "testTag1Value"}
        measurement_map = {}
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        record = measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertIsNone(record)
| 40.295203 | 88 | 0.671978 |
import unittest
import mock
import logging
from datetime import datetime
from opencensus.stats.view import View
from opencensus.stats.view_data import ViewData
from opencensus.stats.measurement import Measurement
from opencensus.stats.measure import BaseMeasure
from opencensus.stats.measure import MeasureInt
from opencensus.stats import measure_to_view_map as measure_to_view_map_module
class TestMeasureToViewMap(unittest.TestCase):
    """Tests for ``MeasureToViewMap`` construction, view lookup,
    view registration and measurement recording."""

    @staticmethod
    def _get_target_class():
        return measure_to_view_map_module.MeasureToViewMap

    def _make_one(self, *args, **kw):
        return self._get_target_class()(*args, **kw)

    def test_constructor(self):
        """A fresh map starts with empty registries and no exported views."""
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        self.assertEqual({},
                         measure_to_view_map._measure_to_view_data_list_map)
        self.assertEqual({}, measure_to_view_map._registered_views)
        self.assertEqual({}, measure_to_view_map._registered_measures)
        self.assertEqual(set(), measure_to_view_map.exported_views)

    def test_get_view(self):
        """get_view returns data only for a registered view with view data."""
        name = "testView"
        description = "testDescription"
        columns = mock.Mock()
        measure = mock.Mock()
        aggregation = mock.Mock()
        view = View(
            name=name,
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        timestamp = mock.Mock()
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        # No registered views at all -> nothing to return.
        measure_to_view_map._registered_views = {}
        no_registered_views = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNone(no_registered_views)
        # Registered view with matching view data -> data is returned.
        measure_to_view_map._registered_views = {name: view}
        measure_to_view_map._measure_to_view_data_list_map = {
            view.measure.name:
            [ViewData(view=view, start_time=timestamp, end_time=timestamp)]
        }
        view_data = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNotNone(view_data)
        # Registered view but no view data -> None.
        measure_to_view_map._measure_to_view_data_list_map = {}
        view_data = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNone(view_data)
        # View data exists for the measure but belongs to another view -> None.
        measure_to_view_map._measure_to_view_data_list_map = {
            view.measure.name: [
                ViewData(
                    view=mock.Mock(), start_time=timestamp, end_time=timestamp)
            ]
        }
        view_data = measure_to_view_map.get_view(
            view_name=name, timestamp=timestamp)
        self.assertIsNone(view_data)

    def test_filter_exported_views(self):
        """Without filtering configured, all views are exported unchanged."""
        description = "testDescription"
        columns = mock.Mock()
        measure = mock.Mock()
        aggregation = mock.Mock()
        test_view_1 = View(
            name="testView1",
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        test_view_2 = View(
            name="testView2",
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        all_the_views = {test_view_1, test_view_2}
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        views = measure_to_view_map.filter_exported_views(
            all_views=all_the_views)
        self.assertEqual(views, all_the_views)

    def test_register_view(self):
        """register_view stores the view and measure; re-registration and
        clashing measure names do not overwrite earlier registrations."""
        name = "testView"
        description = "testDescription"
        columns = mock.Mock()
        measure = MeasureInt("measure", "description", "1")
        aggregation = mock.Mock()
        view = View(
            name=name,
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        timestamp = mock.Mock()
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        measure_to_view_map._registered_views = {}
        measure_to_view_map._registered_measures = {}
        measure_to_view_map.register_view(view=view, timestamp=timestamp)
        self.assertIsNone(measure_to_view_map.exported_views)
        self.assertEqual(measure_to_view_map._registered_views[view.name],
                         view)
        self.assertEqual(
            measure_to_view_map._registered_measures[measure.name], measure)
        self.assertIsNotNone(
            measure_to_view_map._measure_to_view_data_list_map[
                view.measure.name])
        # A second view over the same measure keeps the original measure.
        view2 = View(
            name="testView2",
            description=description,
            columns=columns,
            measure=measure,
            aggregation=aggregation)
        test_with_registered_measures = measure_to_view_map.register_view(
            view=view2, timestamp=timestamp)
        self.assertIsNone(test_with_registered_measures)
        self.assertEqual(
            measure_to_view_map._registered_measures[measure.name], measure)
        # A different measure with a clashing name must not replace the
        # measure registered first.
        measure2 = MeasureInt("measure", "another measure", "ms")
        view3 = View(
            name="testView3",
            description=description,
            columns=columns,
            measure=measure2,
            aggregation=aggregation)
        test_with_registered_measures = measure_to_view_map.register_view(
            view=view3, timestamp=timestamp)
        self.assertIsNone(test_with_registered_measures)
        self.assertEqual(
            measure_to_view_map._registered_measures[measure2.name], measure)
        # A None entry for the measure stays None after re-registration.
        measure_to_view_map._registered_measures = {measure.name: None}
        self.assertIsNone(
            measure_to_view_map._registered_measures.get(measure.name))
        measure_to_view_map.register_view(view=view, timestamp=timestamp)
        self.assertIsNone(
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNotNone(
            measure_to_view_map._measure_to_view_data_list_map[
                view.measure.name])
        # Re-registering an already-registered view is a no-op.
        measure_to_view_map._registered_views = {name: view}
        test_result_1 = measure_to_view_map.register_view(
            view=view, timestamp=timestamp)
        self.assertIsNone(test_result_1)
        self.assertIsNotNone(
            measure_to_view_map._measure_to_view_data_list_map[
                view.measure.name])

    def test_record(self):
        """record only creates view data for registered measures."""
        measure_name = "test_measure"
        measure_description = "test_description"
        measure = BaseMeasure(
            name=measure_name, description=measure_description)
        view_name = "test_view"
        view_description = "test_description"
        view_columns = ["testTag1", "testColumn2"]
        view_measure = measure
        view_aggregation = mock.Mock()
        view = View(
            name=view_name,
            description=view_description,
            columns=view_columns,
            measure=view_measure,
            aggregation=view_aggregation)
        measure_value = 5
        tags = {"testTag1": "testTag1Value"}
        measurement_map = {measure: measure_value}
        timestamp = mock.Mock()
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        # Unregistered measure -> nothing recorded.
        measure_to_view_map._registered_measures = {}
        record = measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertNotEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNone(record)
        # Registered measure without view data -> still nothing recorded.
        measure_to_view_map._registered_measures = {measure.name: measure}
        measure_to_view_map._measure_to_view_data_list_map = {}
        record = measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNone(record)
        # Registered measure with matching view data -> entry is kept.
        measure_to_view_map._measure_to_view_data_list_map = {
            measure.name: [mock.Mock()]
        }
        measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIn(
            measure.name, measure_to_view_map._measure_to_view_data_list_map)
        # View data registered under an unrelated key is untouched.
        measure_to_view_map._measure_to_view_data_list_map = {
            "testing": [mock.Mock()]
        }
        measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertNotIn(
            measure.name, measure_to_view_map._measure_to_view_data_list_map)
        # Sanity-check the call path on a fully mocked map.
        measure_to_view_map_mock = mock.Mock()
        measure_to_view_map = measure_to_view_map_mock
        measure_to_view_map._registered_measures = {measure.name: measure}
        measure_to_view_map._measure_to_view_data_list_map = mock.Mock()
        measure_to_view_map.record(
            tags=mock.Mock(), stats=mock.Mock(), timestamp=mock.Mock())
        self.assertEqual(
            measure,
            measure_to_view_map._registered_measures.get(measure.name))
        self.assertIsNotNone(measure_to_view_map.view_datas)
        self.assertTrue(measure_to_view_map_mock.record.called)
        # An empty measurement map records nothing.
        tags = {"testTag1": "testTag1Value"}
        measurement_map = {}
        measure_to_view_map = measure_to_view_map_module.MeasureToViewMap()
        record = measure_to_view_map.record(
            tags=tags, measurement_map=measurement_map, timestamp=timestamp)
        self.assertIsNone(record)
| true | true |
f7fde7796f27f83f7053daafb1f649030d0b77c6 | 10,925 | py | Python | egs/yesno/ASR/local/prepare_lang.py | TIFOSI528/icefall | 6f7860a0a60b53026216fa4ba19048955951333e | [
"Apache-2.0"
] | 173 | 2021-07-01T03:36:53.000Z | 2022-03-30T09:17:51.000Z | egs/yesno/ASR/local/prepare_lang.py | TIFOSI528/icefall | 6f7860a0a60b53026216fa4ba19048955951333e | [
"Apache-2.0"
] | 200 | 2021-07-01T03:14:19.000Z | 2022-03-31T13:15:07.000Z | egs/yesno/ASR/local/prepare_lang.py | TIFOSI528/icefall | 6f7860a0a60b53026216fa4ba19048955951333e | [
"Apache-2.0"
] | 57 | 2021-07-15T09:38:09.000Z | 2022-03-29T02:03:48.000Z | #!/usr/bin/env python3
# Copyright (c) 2021 Xiaomi Corporation (authors: Fangjun Kuang)
"""
This script takes as input a lexicon file "data/lang_phone/lexicon.txt"
consisting of words and tokens (i.e., phones) and does the following:
1. Add disambiguation symbols to the lexicon and generate lexicon_disambig.txt
2. Generate tokens.txt, the token table mapping a token to a unique integer.
3. Generate words.txt, the word table mapping a word to a unique integer.
4. Generate L.pt, in k2 format. It can be loaded by
d = torch.load("L.pt")
lexicon = k2.Fsa.from_dict(d)
5. Generate L_disambig.pt, in k2 format.
"""
import math
from collections import defaultdict
from pathlib import Path
from typing import Any, Dict, List, Tuple
import k2
import torch
from icefall.lexicon import read_lexicon, write_lexicon
Lexicon = List[Tuple[str, List[str]]]
def write_mapping(filename: str, sym2id: Dict[str, int]) -> None:
    """Save a symbol-to-ID mapping to ``filename``, one ``sym id`` per line.
    Note:
      No ``read_mapping`` counterpart is needed, since the file can be
      read back through :func:`k2.SymbolTable.from_file`.
    Args:
      filename:
        Destination path of the mapping file.
      sym2id:
        A dict mapping symbols to IDs.
    Returns:
      Return None.
    """
    lines = [f"{symbol} {index}\n" for symbol, index in sym2id.items()]
    with open(filename, "w", encoding="utf-8") as out:
        out.writelines(lines)
def get_tokens(lexicon: Lexicon) -> List[str]:
    """Return the sorted list of distinct tokens appearing in ``lexicon``.
    Args:
      lexicon:
        It is the return value of :func:`read_lexicon`.
    Returns:
      A sorted list of unique tokens.
    """
    distinct = {token for _, pronunciation in lexicon for token in pronunciation}
    return sorted(distinct)
def get_words(lexicon: Lexicon) -> List[str]:
    """Return the sorted list of distinct words appearing in ``lexicon``.
    Args:
      lexicon:
        It is the return value of :func:`read_lexicon`.
    Returns:
      A sorted list of unique words.
    """
    return sorted({word for word, _ in lexicon})
def add_disambig_symbols(lexicon: Lexicon) -> Tuple[Lexicon, int]:
    """Append pseudo-token disambiguation symbols #1, #2, ... to ambiguous
    pronunciations so that no two entries share a token sequence and none
    is a prefix of another (cf. Kaldi's add_lex_disambig.pl).
    Args:
      lexicon:
        It is returned by :func:`read_lexicon`.
    Returns:
      A tuple of (lexicon with disambig symbols, ID of the largest
      disambig symbol that appears in it).
    """
    # One pass to tally full token sequences and to mark every non-empty
    # proper prefix of some entry's token sequence.
    seq_count = defaultdict(int)
    is_prefix = defaultdict(int)
    for _, pron in lexicon:
        seq_count[" ".join(pron)] += 1
        for k in range(1, len(pron)):
            is_prefix[" ".join(pron[:k])] = 1
    # #0 is reserved for other purposes, so numbering starts at #1.
    first_allowed = 1
    max_disambig = first_allowed - 1
    last_used = defaultdict(int)
    result = []
    for word, pron in lexicon:
        key = " ".join(pron)
        assert key != ""
        # Unique and not a prefix of anything -> no disambig symbol needed.
        if is_prefix[key] == 0 and seq_count[key] == 1:
            result.append((word, pron))
            continue
        # Otherwise reuse the counter for this token sequence: #1, #2, ...
        disambig = last_used[key]
        disambig = first_allowed if disambig == 0 else disambig + 1
        max_disambig = max(max_disambig, disambig)
        last_used[key] = disambig
        result.append((word, pron + [f"#{disambig}"]))
    return result, max_disambig
def generate_id_map(symbols: List[str]) -> Dict[str, int]:
    """Map each symbol to its position in ``symbols``.
    Args:
      symbols:
        A list of unique symbols.
    Returns:
      A dict mapping each symbol to its 0-based index.
    """
    return dict(zip(symbols, range(len(symbols))))
def add_self_loops(
    arcs: List[List[Any]], disambig_token: int, disambig_word: int
) -> List[List[Any]]:
    """Return ``arcs`` plus a disambig self-loop on every state that has at
    least one outgoing arc with a non-epsilon output label.
    This propagates disambiguation symbols through the FST, mirroring
    Kaldi's fstaddselfloops.pl; with k2-style FSTs the single final state
    needs no self-loop.
    Args:
      arcs:
        A list of arcs, each ``[src_state, dest_state, label, aux_label,
        score]``.
      disambig_token:
        Token ID of the symbol ``#0`` (input label of the self-loops).
      disambig_word:
        Word ID of the symbol ``#0`` (output label of the self-loops).
    Return:
      Return new ``arcs`` containing the self-loops.
    """
    loop_states = {arc[0] for arc in arcs if arc[3] != 0}
    self_loops = [
        [state, state, disambig_token, disambig_word, 0]
        for state in loop_states
    ]
    return arcs + self_loops
def lexicon_to_fst(
    lexicon: Lexicon,
    token2id: Dict[str, int],
    word2id: Dict[str, int],
    sil_token: str = "SIL",
    sil_prob: float = 0.5,
    need_self_loops: bool = False,
) -> k2.Fsa:
    """Convert a lexicon to an FST (in k2 format) with optional silence at
    the beginning and end of each word.
    Args:
      lexicon:
        The input lexicon. See also :func:`read_lexicon`
      token2id:
        A dict mapping tokens to IDs.
      word2id:
        A dict mapping words to IDs.
      sil_token:
        The silence token.
      sil_prob:
        The probability for adding a silence at the beginning and end
        of the word.
      need_self_loops:
        If True, add self-loop to states with non-epsilon output symbols
        on at least one arc out of the state. The input label for this
        self loop is `token2id["#0"]` and the output label is `word2id["#0"]`.
    Returns:
      Return an instance of `k2.Fsa` representing the given lexicon.
    """
    assert sil_prob > 0.0 and sil_prob < 1.0
    # CAUTION: we use score, i.e, negative cost.
    sil_score = math.log(sil_prob)
    no_sil_score = math.log(1.0 - sil_prob)
    start_state = 0
    loop_state = 1  # words enter and leave from here
    sil_state = 2  # words terminate here when followed by silence; this state
    # has a silence transition to loop_state.
    next_state = 3  # the next un-allocated state, will be incremented as we go.
    # Each arc is [src_state, dest_state, ilabel(token), olabel(word), score].
    arcs = []
    assert token2id["<eps>"] == 0
    assert word2id["<eps>"] == 0
    eps = 0
    sil_token = token2id[sil_token]
    arcs.append([start_state, loop_state, eps, eps, no_sil_score])
    arcs.append([start_state, sil_state, eps, eps, sil_score])
    arcs.append([sil_state, loop_state, sil_token, eps, 0])
    for word, tokens in lexicon:
        assert len(tokens) > 0, f"{word} has no pronunciations"
        cur_state = loop_state
        word = word2id[word]
        tokens = [token2id[i] for i in tokens]
        # The word label is emitted on the first token's arc; the remaining
        # arcs carry epsilon outputs.
        for i in range(len(tokens) - 1):
            w = word if i == 0 else eps
            arcs.append([cur_state, next_state, tokens[i], w, 0])
            cur_state = next_state
            next_state += 1
        # now for the last token of this word
        # It has two out-going arcs, one to the loop state,
        # the other one to the sil_state.
        i = len(tokens) - 1
        w = word if i == 0 else eps
        arcs.append([cur_state, loop_state, tokens[i], w, no_sil_score])
        arcs.append([cur_state, sil_state, tokens[i], w, sil_score])
    if need_self_loops:
        disambig_token = token2id["#0"]
        disambig_word = word2id["#0"]
        arcs = add_self_loops(
            arcs,
            disambig_token=disambig_token,
            disambig_word=disambig_word,
        )
    # The final state uses -1 labels per k2 convention.
    final_state = next_state
    arcs.append([loop_state, final_state, -1, -1, 0])
    arcs.append([final_state])
    # Serialize sorted by source state (final-state line last) for
    # k2's text format.
    arcs = sorted(arcs, key=lambda arc: arc[0])
    arcs = [[str(i) for i in arc] for arc in arcs]
    arcs = [" ".join(arc) for arc in arcs]
    arcs = "\n".join(arcs)
    fsa = k2.Fsa.from_str(arcs, acceptor=False)
    return fsa
def main():
    """Build lang-dir artifacts from data/lang_phone/lexicon.txt.
    Produces tokens.txt, words.txt, lexicon_disambig.txt, L.pt and
    L_disambig.pt inside data/lang_phone.
    """
    out_dir = Path("data/lang_phone")
    lexicon_filename = out_dir / "lexicon.txt"
    sil_token = "SIL"
    sil_prob = 0.5
    lexicon = read_lexicon(lexicon_filename)
    tokens = get_tokens(lexicon)
    words = get_words(lexicon)
    lexicon_disambig, max_disambig = add_disambig_symbols(lexicon)
    # Disambiguation symbols #0..#max_disambig become ordinary tokens.
    for i in range(max_disambig + 1):
        disambig = f"#{i}"
        assert disambig not in tokens
        tokens.append(f"#{i}")
    # <eps> must get ID 0 in both tables (lexicon_to_fst asserts this).
    assert "<eps>" not in tokens
    tokens = ["<eps>"] + tokens
    assert "<eps>" not in words
    assert "#0" not in words
    assert "<s>" not in words
    assert "</s>" not in words
    words = ["<eps>"] + words + ["#0", "<s>", "</s>"]
    token2id = generate_id_map(tokens)
    word2id = generate_id_map(words)
    write_mapping(out_dir / "tokens.txt", token2id)
    write_mapping(out_dir / "words.txt", word2id)
    write_lexicon(out_dir / "lexicon_disambig.txt", lexicon_disambig)
    L = lexicon_to_fst(
        lexicon,
        token2id=token2id,
        word2id=word2id,
        sil_token=sil_token,
        sil_prob=sil_prob,
    )
    L_disambig = lexicon_to_fst(
        lexicon_disambig,
        token2id=token2id,
        word2id=word2id,
        sil_token=sil_token,
        sil_prob=sil_prob,
        need_self_loops=True,
    )
    torch.save(L.as_dict(), out_dir / "L.pt")
    torch.save(L_disambig.as_dict(), out_dir / "L_disambig.pt")
# NOTE: removed an unreachable ``if False:`` block that drew the FSTs and
# was explicitly marked "Just for debugging, will remove it".
if __name__ == "__main__":
    main()
| 29.6875 | 80 | 0.633501 |
import math
from collections import defaultdict
from pathlib import Path
from typing import Any, Dict, List, Tuple
import k2
import torch
from icefall.lexicon import read_lexicon, write_lexicon
Lexicon = List[Tuple[str, List[str]]]
def write_mapping(filename: str, sym2id: Dict[str, int]) -> None:
    """Write one ``symbol id`` pair per line of ``filename`` (UTF-8)."""
    with open(filename, "w", encoding="utf-8") as f:
        for sym, i in sym2id.items():
            f.write(f"{sym} {i}\n")
def get_tokens(lexicon: Lexicon) -> List[str]:
    """Return the sorted unique tokens used by any pronunciation."""
    ans = set()
    for _, tokens in lexicon:
        ans.update(tokens)
    sorted_ans = sorted(list(ans))
    return sorted_ans
def get_words(lexicon: Lexicon) -> List[str]:
    """Return the sorted unique words appearing in the lexicon."""
    ans = set()
    for word, _ in lexicon:
        ans.add(word)
    sorted_ans = sorted(list(ans))
    return sorted_ans
def add_disambig_symbols(lexicon: Lexicon) -> Tuple[Lexicon, int]:
    """Append disambiguation symbols #1, #2, ... to token sequences that
    are ambiguous (repeated, or a prefix of another entry) so that all
    pronunciations are distinct and none is a prefix of another.
    Returns:
      A tuple (disambiguated lexicon, largest disambig index used).
    """
    # Count occurrences of each full token sequence.
    count = defaultdict(int)
    for _, tokens in lexicon:
        count[" ".join(tokens)] += 1
    # Mark every non-empty proper prefix of a token sequence.
    issubseq = defaultdict(int)
    for _, tokens in lexicon:
        tokens = tokens.copy()
        tokens.pop()
        while tokens:
            issubseq[" ".join(tokens)] = 1
            tokens.pop()
    # BUGFIX: these initializations were missing in this copy, so the loop
    # below raised NameError on first use. #0 is reserved for other
    # purposes, hence numbering starts at #1.
    ans = []
    first_allowed_disambig = 1
    max_disambig = first_allowed_disambig - 1
    last_used_disambig_symbol_of = defaultdict(int)
    for word, tokens in lexicon:
        tokenseq = " ".join(tokens)
        assert tokenseq != ""
        if issubseq[tokenseq] == 0 and count[tokenseq] == 1:
            ans.append((word, tokens))
            continue
        cur_disambig = last_used_disambig_symbol_of[tokenseq]
        if cur_disambig == 0:
            cur_disambig = first_allowed_disambig
        else:
            cur_disambig += 1
        if cur_disambig > max_disambig:
            max_disambig = cur_disambig
        last_used_disambig_symbol_of[tokenseq] = cur_disambig
        tokenseq += f" #{cur_disambig}"
        ans.append((word, tokenseq.split()))
    return ans, max_disambig
def generate_id_map(symbols: List[str]) -> Dict[str, int]:
    """Map each symbol to its 0-based index in ``symbols``."""
    return {sym: i for i, sym in enumerate(symbols)}
def add_self_loops(
    arcs: List[List[Any]], disambig_token: int, disambig_word: int
) -> List[List[Any]]:
    """Append a disambig self-loop to every state that has an outgoing arc
    with a non-epsilon output label (cf. Kaldi's fstaddselfloops.pl).
    Each arc is ``[src, dst, ilabel, olabel, score]``; the self-loops use
    ``disambig_token``/``disambig_word`` as labels and score 0.
    """
    states_needs_self_loops = set()
    for arc in arcs:
        src, dst, ilabel, olabel, score = arc
        if olabel != 0:
            states_needs_self_loops.add(src)
    ans = []
    for s in states_needs_self_loops:
        ans.append([s, s, disambig_token, disambig_word, 0])
    return arcs + ans
def lexicon_to_fst(
    lexicon: Lexicon,
    token2id: Dict[str, int],
    word2id: Dict[str, int],
    sil_token: str = "SIL",
    sil_prob: float = 0.5,
    need_self_loops: bool = False,
) -> k2.Fsa:
    """Convert a lexicon to a k2 FST with optional silence at the
    beginning and end of each word; returns a ``k2.Fsa``."""
    assert sil_prob > 0.0 and sil_prob < 1.0
    # Scores are log-probabilities (negative costs).
    sil_score = math.log(sil_prob)
    no_sil_score = math.log(1.0 - sil_prob)
    start_state = 0
    loop_state = 1  # words enter and leave from here
    sil_state = 2  # words followed by silence terminate here
    next_state = 3  # next un-allocated state
    # Each arc is [src_state, dest_state, ilabel(token), olabel(word), score].
    arcs = []
    assert token2id["<eps>"] == 0
    assert word2id["<eps>"] == 0
    eps = 0
    sil_token = token2id[sil_token]
    arcs.append([start_state, loop_state, eps, eps, no_sil_score])
    arcs.append([start_state, sil_state, eps, eps, sil_score])
    arcs.append([sil_state, loop_state, sil_token, eps, 0])
    for word, tokens in lexicon:
        assert len(tokens) > 0, f"{word} has no pronunciations"
        cur_state = loop_state
        word = word2id[word]
        tokens = [token2id[i] for i in tokens]
        # Word label goes on the first token's arc; the rest emit epsilon.
        for i in range(len(tokens) - 1):
            w = word if i == 0 else eps
            arcs.append([cur_state, next_state, tokens[i], w, 0])
            cur_state = next_state
            next_state += 1
        # Last token: one arc to the loop state, one to the silence state.
        i = len(tokens) - 1
        w = word if i == 0 else eps
        arcs.append([cur_state, loop_state, tokens[i], w, no_sil_score])
        arcs.append([cur_state, sil_state, tokens[i], w, sil_score])
    if need_self_loops:
        disambig_token = token2id["#0"]
        disambig_word = word2id["#0"]
        arcs = add_self_loops(
            arcs,
            disambig_token=disambig_token,
            disambig_word=disambig_word,
        )
    # Final state uses -1 labels per k2 convention.
    final_state = next_state
    arcs.append([loop_state, final_state, -1, -1, 0])
    arcs.append([final_state])
    # Serialize sorted by source state for k2's text format.
    arcs = sorted(arcs, key=lambda arc: arc[0])
    arcs = [[str(i) for i in arc] for arc in arcs]
    arcs = [" ".join(arc) for arc in arcs]
    arcs = "\n".join(arcs)
    fsa = k2.Fsa.from_str(arcs, acceptor=False)
    return fsa
def main():
    """Build tokens.txt, words.txt, lexicon_disambig.txt, L.pt and
    L_disambig.pt under data/lang_phone from lexicon.txt."""
    out_dir = Path("data/lang_phone")
    lexicon_filename = out_dir / "lexicon.txt"
    sil_token = "SIL"
    sil_prob = 0.5
    lexicon = read_lexicon(lexicon_filename)
    tokens = get_tokens(lexicon)
    words = get_words(lexicon)
    lexicon_disambig, max_disambig = add_disambig_symbols(lexicon)
    # Disambiguation symbols become ordinary tokens in the token table.
    for i in range(max_disambig + 1):
        disambig = f"#{i}"
        assert disambig not in tokens
        tokens.append(f"#{i}")
    # <eps> must get ID 0 in both tables (lexicon_to_fst asserts this).
    assert "<eps>" not in tokens
    tokens = ["<eps>"] + tokens
    assert "<eps>" not in words
    assert "#0" not in words
    assert "<s>" not in words
    assert "</s>" not in words
    words = ["<eps>"] + words + ["#0", "<s>", "</s>"]
    token2id = generate_id_map(tokens)
    word2id = generate_id_map(words)
    write_mapping(out_dir / "tokens.txt", token2id)
    write_mapping(out_dir / "words.txt", word2id)
    write_lexicon(out_dir / "lexicon_disambig.txt", lexicon_disambig)
    L = lexicon_to_fst(
        lexicon,
        token2id=token2id,
        word2id=word2id,
        sil_token=sil_token,
        sil_prob=sil_prob,
    )
    L_disambig = lexicon_to_fst(
        lexicon_disambig,
        token2id=token2id,
        word2id=word2id,
        sil_token=sil_token,
        sil_prob=sil_prob,
        need_self_loops=True,
    )
    torch.save(L.as_dict(), out_dir / "L.pt")
    torch.save(L_disambig.as_dict(), out_dir / "L_disambig.pt")
# NOTE: removed the unreachable ``if False:`` debugging block that drew the
# FSTs; it was dead code.
if __name__ == "__main__":
    main()
| true | true |
f7fde7d735d6d8204161c6f69f919f7466338266 | 1,591 | py | Python | nasa_background.py | Thomas9292/nasa-background | 65cd238f780f76165af68eb2cba92a68b10ff643 | [
"MIT"
] | 20 | 2019-12-24T13:22:19.000Z | 2020-01-26T01:23:41.000Z | nasa_background.py | Thomas9292/nasa-background | 65cd238f780f76165af68eb2cba92a68b10ff643 | [
"MIT"
] | 13 | 2019-12-22T18:03:35.000Z | 2020-01-14T08:55:22.000Z | nasa_background.py | Thomas9292/nasa-background | 65cd238f780f76165af68eb2cba92a68b10ff643 | [
"MIT"
] | 7 | 2019-12-24T16:18:44.000Z | 2019-12-25T14:22:32.000Z | from datetime import datetime
import click
from tools import background, nasa_api
from tools.utils import parse_str_to_date
@click.group()
def nasa_background():
    # Root click command group; subcommands (e.g. ``update``) attach to it.
    # NOTE: a docstring here would become the group's --help text, so this
    # body is intentionally annotated with comments only.
    pass
@nasa_background.command()
@click.option("--date",
              default=None,
              help="Enter the date as a single string in YYYYMMDD or YYYY-MM-DD format." )
@click.option("--auto",
              is_flag=True,
              help="Disables prompts and sets the background automatically if this can successfully be completed." )
def update(date, auto):
    '''Get the newest NASA Picture of the Day and set it as background'''
    # Default to today's picture when no --date was supplied.
    if date is None:
        date = datetime.now()
    else:
        date = parse_str_to_date(date)
    try:
        # Fetch the APOD metadata and show title + explanation to the user.
        meta_info = nasa_api.get_info(date)
        click.echo(f"Title: {meta_info['title']}\n")
        click.echo(meta_info['explanation'] + "\n")
        # With --auto the confirmation prompt is skipped entirely.
        if auto or click.confirm("Do you wish to download this image and set it as background?"):
            # Download the image and apply it as the desktop background.
            file_path = nasa_api.download_image(date)
            background.change_background(file_path, auto)
    except KeyError:
        # Missing keys in the API response -- typically no image exists for
        # the requested date.
        click.echo(f"Image not found for the selected date {date}. ")
    except Exception as e:
        # Broad catch so the CLI exits with a message instead of a traceback.
        click.echo("Fatal error encountered, exiting program.")
        click.echo(e)
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    nasa_background()
| 32.469388 | 116 | 0.659962 | from datetime import datetime
import click
from tools import background, nasa_api
from tools.utils import parse_str_to_date
@click.group()
def nasa_background():
    # Root click command group; subcommands attach to it via decorators.
    pass
@nasa_background.command()
@click.option("--date",
              default=None,
              help="Enter the date as a single string in YYYYMMDD or YYYY-MM-DD format." )
@click.option("--auto",
              is_flag=True,
              help="Disables prompts and sets the background automatically if this can successfully be completed." )
def update(date, auto):
    # Fetch the NASA Picture of the Day for the given (or current) date and
    # set it as the desktop background, optionally without prompting.
    if date is None:
        date = datetime.now()
    else:
        date = parse_str_to_date(date)
    try:
        # Fetch APOD metadata and show title + explanation to the user.
        meta_info = nasa_api.get_info(date)
        click.echo(f"Title: {meta_info['title']}\n")
        click.echo(meta_info['explanation'] + "\n")
        # With --auto the confirmation prompt is skipped entirely.
        if auto or click.confirm("Do you wish to download this image and set it as background?"):
            file_path = nasa_api.download_image(date)
            background.change_background(file_path, auto)
    except KeyError:
        # Missing keys in the API response -- typically no image for that date.
        click.echo(f"Image not found for the selected date {date}. ")
    except Exception as e:
        # Broad catch so the CLI exits with a message instead of a traceback.
        click.echo("Fatal error encountered, exiting program.")
        click.echo(e)
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    nasa_background()
| true | true |
f7fde8a0153e5f47b6098d1748aa541f4af9a7f2 | 964 | py | Python | vortidplenigilo.py | corcra/esperanto | be8f6eda63c4f20b6f7667a50f9b85d3dba32258 | [
"MIT"
] | 15 | 2015-11-15T15:15:55.000Z | 2018-05-05T19:13:01.000Z | vortidplenigilo.py | corcra/esperanto | be8f6eda63c4f20b6f7667a50f9b85d3dba32258 | [
"MIT"
] | 1 | 2015-11-19T15:11:32.000Z | 2015-11-19T15:12:06.000Z | vortidplenigilo.py | corcra/esperanto | be8f6eda63c4f20b6f7667a50f9b85d3dba32258 | [
"MIT"
] | 2 | 2015-11-28T13:15:35.000Z | 2016-03-03T09:24:17.000Z | #!/usr/bin/env ipython
# coding=utf-8
# This is intended to be run on a cronjob
from __future__ import print_function
import tweepy
from creds import consumer_key, consumer_secret, access_token, access_token_secret
from soup import tweet_soup
from parse_EO_full import eo_to_en
import random
# --- set up API --- #
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
# --- generate a tweet --- #
# Rejection-sample dictionary roots until one yields a tweetable result:
# the root must be a single word and the composed tweet must fit in 140
# characters.
success = False
while not success:
    root = random.choice(list(eo_to_en.keys()))
    root_meaning = eo_to_en[root]
    try:
        assert root.count(' ') == 0
        tweet = tweet_soup(root, root_meaning)
        assert len(tweet) < 140
        success = True
        print(tweet.encode('utf8'))
    except AssertionError:
        pass
# NOTE(review): the retry loop relies on ``assert`` for validation, which
# is stripped under ``python -O``, and it loops forever if no candidate
# ever passes -- confirm both are acceptable for a cronjob.
# --- tweet yo --- #
api.update_status(status=tweet)
| 27.542857 | 82 | 0.708506 |
from __future__ import print_function
import tweepy
from creds import consumer_key, consumer_secret, access_token, access_token_secret
from soup import tweet_soup
from parse_EO_full import eo_to_en
import random
# Authenticate against the Twitter API with stored credentials.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
# Rejection-sample roots until one produces a valid tweet: the root must be
# a single word and the rendered tweet must fit in 140 characters.
success = False
while not success:
    root = random.choice(list(eo_to_en.keys()))
    root_meaning = eo_to_en[root]
    try:
        assert root.count(' ') == 0
        tweet = tweet_soup(root, root_meaning)
        assert len(tweet) < 140
        success = True
        print(tweet.encode('utf8'))
    except AssertionError:
        pass
# Post the accepted tweet.
api.update_status(status=tweet)
| true | true |
f7fdea1735672e981febba7e392ec581a9562beb | 3,155 | py | Python | nova/console/websocketproxy.py | NetApp/nova | ca490d48a762a423449c654d5a7caeadecf2f6ca | [
"Apache-2.0"
] | 2 | 2015-11-05T04:52:34.000Z | 2016-03-07T03:00:06.000Z | nova/console/websocketproxy.py | NetApp/nova | ca490d48a762a423449c654d5a7caeadecf2f6ca | [
"Apache-2.0"
] | 1 | 2018-01-19T07:50:49.000Z | 2018-01-19T07:50:49.000Z | nova/console/websocketproxy.py | NetApp/nova | ca490d48a762a423449c654d5a7caeadecf2f6ca | [
"Apache-2.0"
] | 1 | 2020-07-24T07:32:11.000Z | 2020-07-24T07:32:11.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
Websocket proxy that is compatible with OpenStack Nova.
Leverages websockify.py by Joel Martin
'''
import Cookie
import socket
import websockify
from nova.consoleauth import rpcapi as consoleauth_rpcapi
from nova import context
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class NovaWebSocketProxy(websockify.WebSocketProxy):
    """Websockify proxy that authorizes clients via Nova's consoleauth."""
    def __init__(self, *args, **kwargs):
        # The target is resolved per connection from the console token, so
        # websockify's static target configuration is disabled.
        websockify.WebSocketProxy.__init__(self, unix_target=None,
                                           target_cfg=None,
                                           ssl_target=None, *args, **kwargs)
    def new_client(self):
        """
        Called after a new WebSocket connection has been established.

        Validates the console token from the client's cookie, connects to
        the host/port it maps to, performs an optional HTTP CONNECT
        handshake and proxies traffic until either side closes.
        """
        # The console token is delivered in a cookie by the web client.
        cookie = Cookie.SimpleCookie()
        cookie.load(self.headers.getheader('cookie'))
        token = cookie['token'].value
        ctxt = context.get_admin_context()
        rpcapi = consoleauth_rpcapi.ConsoleAuthAPI()
        connect_info = rpcapi.check_token(ctxt, token=token)
        if not connect_info:
            LOG.audit("Invalid Token: %s", token)
            raise Exception(_("Invalid Token"))
        host = connect_info['host']
        port = int(connect_info['port'])
        # Connect to the target
        self.msg("connecting to: %s:%s" % (host, port))
        LOG.audit("connecting to: %s:%s" % (host, port))
        tsock = self.socket(host, port, connect=True)
        # Handshake as necessary
        if connect_info.get('internal_access_path'):
            tsock.send("CONNECT %s HTTP/1.1\r\n\r\n" %
                       connect_info['internal_access_path'])
            while True:
                data = tsock.recv(4096, socket.MSG_PEEK)
                if data.find("\r\n\r\n") != -1:
                    # BUGFIX: the old check ``not status_line.find("200")``
                    # was only true when the line *began* with "200"
                    # (find() == 0) and was false when "200" was absent
                    # (find() == -1), so a failed handshake was never
                    # detected. Require a 200 status explicitly instead.
                    if "200" not in data.split("\r\n")[0]:
                        LOG.audit("Invalid Connection Info %s", token)
                        raise Exception(_("Invalid Connection Info"))
                    tsock.recv(len(data))
                    break
        if self.verbose and not self.daemon:
            print(self.traffic_legend)
        # Start proxying
        try:
            self.do_proxy(tsock)
        except Exception:
            if tsock:
                tsock.shutdown(socket.SHUT_RDWR)
                tsock.close()
                self.vmsg("%s:%s: Target closed" % (host, port))
                LOG.audit("%s:%s: Target closed" % (host, port))
            raise
| 35.055556 | 78 | 0.602536 |
import Cookie
import socket
import websockify
from nova.consoleauth import rpcapi as consoleauth_rpcapi
from nova import context
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class NovaWebSocketProxy(websockify.WebSocketProxy):
    """Websockify proxy that authorizes clients via Nova's consoleauth."""
    def __init__(self, *args, **kwargs):
        # Targets are chosen per connection from the console token, so
        # websockify's static target options are disabled.
        websockify.WebSocketProxy.__init__(self, unix_target=None,
                                           target_cfg=None,
                                           ssl_target=None, *args, **kwargs)
    def new_client(self):
        """Validate the console token, connect to its host/port, perform an
        optional HTTP CONNECT handshake and proxy traffic."""
        cookie = Cookie.SimpleCookie()
        cookie.load(self.headers.getheader('cookie'))
        token = cookie['token'].value
        ctxt = context.get_admin_context()
        rpcapi = consoleauth_rpcapi.ConsoleAuthAPI()
        connect_info = rpcapi.check_token(ctxt, token=token)
        if not connect_info:
            LOG.audit("Invalid Token: %s", token)
            raise Exception(_("Invalid Token"))
        host = connect_info['host']
        port = int(connect_info['port'])
        self.msg("connecting to: %s:%s" % (host, port))
        LOG.audit("connecting to: %s:%s" % (host, port))
        tsock = self.socket(host, port, connect=True)
        if connect_info.get('internal_access_path'):
            tsock.send("CONNECT %s HTTP/1.1\r\n\r\n" %
                       connect_info['internal_access_path'])
            while True:
                data = tsock.recv(4096, socket.MSG_PEEK)
                if data.find("\r\n\r\n") != -1:
                    # BUGFIX: ``not str.find(...)`` fired only when the
                    # status line *began* with "200", so a failed CONNECT
                    # handshake was never detected. Require 200 explicitly.
                    if "200" not in data.split("\r\n")[0]:
                        LOG.audit("Invalid Connection Info %s", token)
                        raise Exception(_("Invalid Connection Info"))
                    tsock.recv(len(data))
                    break
        if self.verbose and not self.daemon:
            print(self.traffic_legend)
        try:
            self.do_proxy(tsock)
        except Exception:
            if tsock:
                tsock.shutdown(socket.SHUT_RDWR)
                tsock.close()
                self.vmsg("%s:%s: Target closed" % (host, port))
                LOG.audit("%s:%s: Target closed" % (host, port))
            raise
| true | true |
f7fdeaa94cc732b4dc7be6a8f88249870c0a181f | 242 | py | Python | airbyte-integrations/connectors/source-faker/main.py | kattos-aws/airbyte | cbcbab4a2399c08d8f66d1b693ac824c245ba3da | [
"MIT"
] | 1 | 2022-03-16T22:53:06.000Z | 2022-03-16T22:53:06.000Z | airbyte-integrations/connectors/source-faker/main.py | kattos-aws/airbyte | cbcbab4a2399c08d8f66d1b693ac824c245ba3da | [
"MIT"
] | 1 | 2021-12-08T21:39:05.000Z | 2021-12-09T17:10:45.000Z | airbyte-integrations/connectors/source-faker/main.py | kattos-aws/airbyte | cbcbab4a2399c08d8f66d1b693ac824c245ba3da | [
"MIT"
] | 1 | 2022-02-19T17:22:50.000Z | 2022-02-19T17:22:50.000Z | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
import sys
from airbyte_cdk.entrypoint import launch
from source_faker import SourceFaker
if __name__ == "__main__":
    # Hand CLI arguments (minus the program name) to the Airbyte launcher.
    launch(SourceFaker(), sys.argv[1:])
| 17.285714 | 56 | 0.72314 |
import sys
from airbyte_cdk.entrypoint import launch
from source_faker import SourceFaker
# Script entry point: instantiate the connector and run the Airbyte launcher
# with the CLI arguments (minus the program name).
if __name__ == "__main__":
    source = SourceFaker()
    launch(source, sys.argv[1:])
| true | true |
f7fdeb2255b5885a6f541831d92b117383847c96 | 1,596 | py | Python | vat.py | awesome-archive/ssl-suite | 03990c5e86432c3f475971aeaf1ff7f7821ef40c | [
"Apache-2.0"
] | null | null | null | vat.py | awesome-archive/ssl-suite | 03990c5e86432c3f475971aeaf1ff7f7821ef40c | [
"Apache-2.0"
] | null | null | null | vat.py | awesome-archive/ssl-suite | 03990c5e86432c3f475971aeaf1ff7f7821ef40c | [
"Apache-2.0"
] | null | null | null | import torch
from homura.modules import cross_entropy_with_softlabels
from torch.distributions import Categorical
from torch.nn import functional as F
from backends.loss import _kl, _l2_normalize
from backends.utils import SSLTrainerBase, disable_bn_stats, get_task
class VATTrainer(SSLTrainerBase):
def labeled(self,
input: torch.Tensor,
target: torch.Tensor) -> (torch.Tensor, torch.Tensor):
output = self.model(input)
target = self.to_onehot(target, self.smoothing)
s_loss = self.loss_f[0](output, target)
return output, s_loss
def unlabeled(self,
input: torch.Tensor) -> (None, torch.Tensor, torch.Tensor):
with disable_bn_stats(self.model):
u_loss = self.vat_loss(input)
e_loss = Categorical(logits=self.model(input)).entropy().mean()
return None, u_loss, e_loss
def vat_loss(self,
input: torch.Tensor) -> torch.Tensor:
with torch.no_grad():
pred = self.model(input)
d = _l2_normalize(input.clone().normal_())
d.requires_grad_(True)
pred_hat = self.model(input + self.xi * d)
adv_loss = _kl(pred, pred_hat)
d_grad, = torch.autograd.grad([adv_loss], [d])
d = _l2_normalize(d_grad)
self.model.zero_grad()
pred_hat = self.model(input + self.eps * d)
return _kl(pred, pred_hat)
if __name__ == "__main__":
import hydra
hydra.main('config/vat.yaml')(
get_task(VATTrainer, [cross_entropy_with_softlabels, F.cross_entropy])
)()
| 33.957447 | 78 | 0.642231 | import torch
from homura.modules import cross_entropy_with_softlabels
from torch.distributions import Categorical
from torch.nn import functional as F
from backends.loss import _kl, _l2_normalize
from backends.utils import SSLTrainerBase, disable_bn_stats, get_task
class VATTrainer(SSLTrainerBase):
def labeled(self,
input: torch.Tensor,
target: torch.Tensor) -> (torch.Tensor, torch.Tensor):
output = self.model(input)
target = self.to_onehot(target, self.smoothing)
s_loss = self.loss_f[0](output, target)
return output, s_loss
def unlabeled(self,
input: torch.Tensor) -> (None, torch.Tensor, torch.Tensor):
with disable_bn_stats(self.model):
u_loss = self.vat_loss(input)
e_loss = Categorical(logits=self.model(input)).entropy().mean()
return None, u_loss, e_loss
def vat_loss(self,
input: torch.Tensor) -> torch.Tensor:
with torch.no_grad():
pred = self.model(input)
d = _l2_normalize(input.clone().normal_())
d.requires_grad_(True)
pred_hat = self.model(input + self.xi * d)
adv_loss = _kl(pred, pred_hat)
d_grad, = torch.autograd.grad([adv_loss], [d])
d = _l2_normalize(d_grad)
self.model.zero_grad()
pred_hat = self.model(input + self.eps * d)
return _kl(pred, pred_hat)
if __name__ == "__main__":
import hydra
hydra.main('config/vat.yaml')(
get_task(VATTrainer, [cross_entropy_with_softlabels, F.cross_entropy])
)()
| true | true |
f7fdeb55bc0326c1b03424292a432a31dc7976f6 | 769 | pyde | Python | sketches/colldectrect/colldectrect.pyde | kantel/processingpy | 74aae222e46f68d1c8f06307aaede3cdae65c8ec | [
"MIT"
] | 4 | 2018-06-03T02:11:46.000Z | 2021-08-18T19:55:15.000Z | sketches/colldectrect/colldectrect.pyde | kantel/processingpy | 74aae222e46f68d1c8f06307aaede3cdae65c8ec | [
"MIT"
] | null | null | null | sketches/colldectrect/colldectrect.pyde | kantel/processingpy | 74aae222e46f68d1c8f06307aaede3cdae65c8ec | [
"MIT"
] | 3 | 2019-12-23T19:12:51.000Z | 2021-04-30T14:00:31.000Z | from enemies import Enemy
from player import Player
def setup():
global enemy, player
size(420, 420)
this.surface.setTitle("Rectangle Collision Detection")
enemy = Enemy(width/2 - 64, height/2 - 32)
player = Player(20, 20)
def draw():
global enemy, player
background("#95ee0f5")
if rect_collision(enemy, player):
background("#817ac6")
else:
background("#95ee0f5")
enemy.show()
player.update()
player.show()
def rect_collision(r1, r2):
distanceX = (r1.x + r1.w/2) - (r2.x + r2.w/2)
distanceY = (r1.y + r1.h/2) - (r2.y + r2.h/2)
halfW = r1.w/2 + r2.w/2
halfH = r1.h/2 + r2.h/2
if (abs(distanceX) < halfW):
if (abs(distanceY) < halfH):
return True
return False
| 24.806452 | 58 | 0.592978 | from enemies import Enemy
from player import Player
def setup():
global enemy, player
size(420, 420)
this.surface.setTitle("Rectangle Collision Detection")
enemy = Enemy(width/2 - 64, height/2 - 32)
player = Player(20, 20)
def draw():
global enemy, player
background("#95ee0f5")
if rect_collision(enemy, player):
background("#817ac6")
else:
background("#95ee0f5")
enemy.show()
player.update()
player.show()
def rect_collision(r1, r2):
distanceX = (r1.x + r1.w/2) - (r2.x + r2.w/2)
distanceY = (r1.y + r1.h/2) - (r2.y + r2.h/2)
halfW = r1.w/2 + r2.w/2
halfH = r1.h/2 + r2.h/2
if (abs(distanceX) < halfW):
if (abs(distanceY) < halfH):
return True
return False
| true | true |
f7fdeb67125e77e9af71a01a0d25eb34834a9b5b | 1,866 | py | Python | python/maheen_code/dump_big_nn.py | maheenRashid/caffe | 82d7b1f19f49942be1f9a8d60bf3afa52c01c300 | [
"BSD-2-Clause"
] | null | null | null | python/maheen_code/dump_big_nn.py | maheenRashid/caffe | 82d7b1f19f49942be1f9a8d60bf3afa52c01c300 | [
"BSD-2-Clause"
] | null | null | null | python/maheen_code/dump_big_nn.py | maheenRashid/caffe | 82d7b1f19f49942be1f9a8d60bf3afa52c01c300 | [
"BSD-2-Clause"
] | null | null | null | FOR getNNIndicesForBigFeatureMats experiments_hashing
out_dir='/disk2/decemberExperiments/gettingNN';
out_dir_featureMats=os.path.join(out_dir,'big_feature_mats');
meta_replace=['/feature_mats','/feature_mats_meta']
big_feature_files=[os.path.join(out_dir_featureMats,file_curr) for file_curr in os.listdir(out_dir_featureMats) if file_curr.endswith('.npz')];
# big_feature_files=big_feature_files[:3];
# meta_feature_files=[];
# for file_curr in big_feature_files:
# file_curr=file_curr.replace(meta_replace[0],meta_replace[1]);
# file_curr=file_curr[:file_curr.rfind('.')]+'.p';
# meta_feature_files.append(file_curr);
# assert os.path.exists(file_curr);
# meta_info_all=[];
# for meta_file_curr in meta_feature_files:
# [paths,sizes]=pickle.load(open(meta_file_curr,'rb'));
# meta_info_all.append([paths,sizes]);
test_path= '/disk2/res11/tubePatches/aeroplane_10_1/0/0.npz';
# meta_info_all[0][0][0]
print test_path
out_path=test_path[:test_path.rfind('.')]+'_indices.npz';
print out_path
script_getNNIndicesForTestMat([test_path],big_feature_files,[out_path])
return
t=time.time();
for test_mat_no in range(len(meta_info_all)):
[paths,sizes]=meta_info_all[test_mat_no];
for test_no in range(len(sizes)):
start_idx=sum(sizes[:test_no]);
end_idx=start_idx+sizes[test_no];
test=mats[test_mat_no][start_idx:end_idx,:];
indices = getNNIndicesForBigFeatureMats(test,mats);
print indices[:10,:10]
print indices.shape,type(indices[0,0])
path_out=path_curr[:path_curr.rfind('.')]+'_indices.npz';
print path_out
np.savez(path_out,indices);
break;
break;
print time.time()-t,'Time'
| 36.588235 | 147 | 0.664523 | FOR getNNIndicesForBigFeatureMats experiments_hashing
out_dir='/disk2/decemberExperiments/gettingNN';
out_dir_featureMats=os.path.join(out_dir,'big_feature_mats');
meta_replace=['/feature_mats','/feature_mats_meta']
big_feature_files=[os.path.join(out_dir_featureMats,file_curr) for file_curr in os.listdir(out_dir_featureMats) if file_curr.endswith('.npz')];
test_path= '/disk2/res11/tubePatches/aeroplane_10_1/0/0.npz';
print test_path
out_path=test_path[:test_path.rfind('.')]+'_indices.npz';
print out_path
script_getNNIndicesForTestMat([test_path],big_feature_files,[out_path])
return
t=time.time();
for test_mat_no in range(len(meta_info_all)):
[paths,sizes]=meta_info_all[test_mat_no];
for test_no in range(len(sizes)):
start_idx=sum(sizes[:test_no]);
end_idx=start_idx+sizes[test_no];
test=mats[test_mat_no][start_idx:end_idx,:];
indices = getNNIndicesForBigFeatureMats(test,mats);
print indices[:10,:10]
print indices.shape,type(indices[0,0])
path_out=path_curr[:path_curr.rfind('.')]+'_indices.npz';
print path_out
np.savez(path_out,indices);
break;
break;
print time.time()-t,'Time'
| false | true |
f7fdeccac279b16a83abefacf96e6b863d155121 | 6,534 | py | Python | applications/popart/bert/tests/unit/pytorch/nsp_test.py | Alwaysproblem/examples-1 | 9754fa63ed1931489a21ac1f5b299f945e369a5c | [
"MIT"
] | null | null | null | applications/popart/bert/tests/unit/pytorch/nsp_test.py | Alwaysproblem/examples-1 | 9754fa63ed1931489a21ac1f5b299f945e369a5c | [
"MIT"
] | null | null | null | applications/popart/bert/tests/unit/pytorch/nsp_test.py | Alwaysproblem/examples-1 | 9754fa63ed1931489a21ac1f5b299f945e369a5c | [
"MIT"
] | null | null | null | # Copyright (c) 2019 Graphcore Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import popart
import numpy as np
import pytest
from bert_model import BertConfig, ExecutionMode, get_model
from tests.torch_bert import BertConfig as TorchBertConfig, BertForNextSentencePrediction
from .full_graph_utils import fwd_graph, bwd_graph
'''
Tests the full nsp graph.
'''
NSP_MAPPING = {}
NSP_MAPPING[ExecutionMode.DEFAULT] = {
"bert.pooler.dense.weight": "NSP/PoolW",
"bert.pooler.dense.bias": "NSP/PoolB",
"cls.seq_relationship.weight": "NSP/NspW",
"cls.seq_relationship.bias": "NSP/NspB"
}
NSP_MAPPING[ExecutionMode.PHASED] = {
"bert.pooler.dense.weight": "BertModel/NSP/Pool/Dense/Weight",
"bert.pooler.dense.bias": "BertModel/NSP/Pool/Dense/Bias",
"cls.seq_relationship.weight": "BertModel/NSP/Classifier/Dense/Weight",
"cls.seq_relationship.bias": "BertModel/NSP/Classifier/Dense/Bias"
}
NSP_TRANSFORM = {
"bert.pooler.dense.weight": np.transpose,
"cls.seq_relationship.weight": np.transpose
}
test_modes = [ExecutionMode.DEFAULT, pytest.param(ExecutionMode.PHASED, marks=pytest.mark.requires_remote_buffers)]
@pytest.mark.parametrize("mode", test_modes)
def test_nsp_fwd(custom_ops, mode):
# ------------------- PopART --------------------
config = BertConfig(task="NSP",
vocab_length=9728,
num_layers=2,
batch_size=1,
hidden_size=768,
sequence_length=128,
activation_type="relu",
popart_dtype="FLOAT",
no_dropout=True,
no_attn_dropout=True,
inference=True,
no_mask=True,
execution_mode=mode,
mask_tokens=0,
split_qkv=False)
popart_model = get_model(config, mode)
# ------------------- PyTorch -------------------------
torch_model = BertForNextSentencePrediction(
TorchBertConfig(config.vocab_length, config.hidden_size,
num_hidden_layers=config.num_layers,
num_attention_heads=config.attention_heads,
intermediate_size=config.ff_size,
hidden_act=config.activation_type,
max_position_embeddings=config.max_positional_length,
layer_norm_eps=config.layer_norm_eps,
mask_tokens=config.mask_tokens,
num_labels=2))
fwd_graph(popart_model,
torch_model,
mode,
NSP_MAPPING[mode],
transform=NSP_TRANSFORM)
@pytest.mark.sanity
@pytest.mark.parametrize("mode", test_modes)
@pytest.mark.parametrize("opt_type", ["SGD", "LAMB"])
def test_nsp_bwd(custom_ops, mode, opt_type):
nsp_bwd(custom_ops, mode, opt_type, 2432, 288)
def nsp_bwd(custom_ops, mode, opt_type, vocab_length=9728, hidden_size=768):
if mode == ExecutionMode.PHASED:
# Phased Execution requires atleast two transformer layers to ensure mlm and embedding are in the same virtual graph.
num_layers = 2
else:
num_layers = 1
# ------------------- PopART --------------------
config = BertConfig(task="NSP",
vocab_length=vocab_length,
num_layers=num_layers,
batch_size=1,
hidden_size=hidden_size,
sequence_length=128,
activation_type="relu",
popart_dtype="FLOAT",
no_dropout=True,
no_attn_dropout=True,
no_mask=True,
update_embedding_dict=True,
phased_execution_type="single",
execution_mode=mode,
split_qkv = (opt_type == "LAMB"))
popart_model = get_model(config, mode)
# ------------------- PyTorch -------------------------
torch_model = BertForNextSentencePrediction(
TorchBertConfig(config.vocab_length, config.hidden_size,
num_hidden_layers=config.num_layers,
num_attention_heads=config.attention_heads,
intermediate_size=config.ff_size,
hidden_act=config.activation_type,
max_position_embeddings=config.max_positional_length,
layer_norm_eps=config.layer_norm_eps,
mask_tokens=config.mask_tokens,
update_embedding_dict=True,
num_labels=2))
l1_lambda = 0.1
def popart_loss_fn(outputs):
if mode == ExecutionMode.PHASED:
with popart_model.scope_provider(popart_model.builder, popart_model.nsp_scope):
loss = popart_model.builder.aiGraphcore.l1loss([outputs[0]],
l1_lambda, debugPrefix="l1LossVal",
reduction=popart.ReductionType.Sum)
else:
loss = popart_model.builder.aiGraphcore.l1loss([outputs[0]], l1_lambda,
debugPrefix="l1LossVal",
reduction=popart.ReductionType.Sum)
popart_model.builder.virtualGraph(loss, popart_model.nsp_scope.virtualGraph)
return loss
def torch_loss_fn(outputs):
return l1_lambda * torch.norm(outputs[0], 1)
bwd_graph(popart_model,
torch_model,
mode,
popart_loss_fn=popart_loss_fn,
torch_loss_fn=torch_loss_fn,
mapping=NSP_MAPPING[mode],
transform=NSP_TRANSFORM,
opt_type=opt_type)
| 40.08589 | 125 | 0.573309 |
import torch
import popart
import numpy as np
import pytest
from bert_model import BertConfig, ExecutionMode, get_model
from tests.torch_bert import BertConfig as TorchBertConfig, BertForNextSentencePrediction
from .full_graph_utils import fwd_graph, bwd_graph
NSP_MAPPING = {}
NSP_MAPPING[ExecutionMode.DEFAULT] = {
"bert.pooler.dense.weight": "NSP/PoolW",
"bert.pooler.dense.bias": "NSP/PoolB",
"cls.seq_relationship.weight": "NSP/NspW",
"cls.seq_relationship.bias": "NSP/NspB"
}
NSP_MAPPING[ExecutionMode.PHASED] = {
"bert.pooler.dense.weight": "BertModel/NSP/Pool/Dense/Weight",
"bert.pooler.dense.bias": "BertModel/NSP/Pool/Dense/Bias",
"cls.seq_relationship.weight": "BertModel/NSP/Classifier/Dense/Weight",
"cls.seq_relationship.bias": "BertModel/NSP/Classifier/Dense/Bias"
}
NSP_TRANSFORM = {
"bert.pooler.dense.weight": np.transpose,
"cls.seq_relationship.weight": np.transpose
}
test_modes = [ExecutionMode.DEFAULT, pytest.param(ExecutionMode.PHASED, marks=pytest.mark.requires_remote_buffers)]
@pytest.mark.parametrize("mode", test_modes)
def test_nsp_fwd(custom_ops, mode):
config = BertConfig(task="NSP",
vocab_length=9728,
num_layers=2,
batch_size=1,
hidden_size=768,
sequence_length=128,
activation_type="relu",
popart_dtype="FLOAT",
no_dropout=True,
no_attn_dropout=True,
inference=True,
no_mask=True,
execution_mode=mode,
mask_tokens=0,
split_qkv=False)
popart_model = get_model(config, mode)
torch_model = BertForNextSentencePrediction(
TorchBertConfig(config.vocab_length, config.hidden_size,
num_hidden_layers=config.num_layers,
num_attention_heads=config.attention_heads,
intermediate_size=config.ff_size,
hidden_act=config.activation_type,
max_position_embeddings=config.max_positional_length,
layer_norm_eps=config.layer_norm_eps,
mask_tokens=config.mask_tokens,
num_labels=2))
fwd_graph(popart_model,
torch_model,
mode,
NSP_MAPPING[mode],
transform=NSP_TRANSFORM)
@pytest.mark.sanity
@pytest.mark.parametrize("mode", test_modes)
@pytest.mark.parametrize("opt_type", ["SGD", "LAMB"])
def test_nsp_bwd(custom_ops, mode, opt_type):
nsp_bwd(custom_ops, mode, opt_type, 2432, 288)
def nsp_bwd(custom_ops, mode, opt_type, vocab_length=9728, hidden_size=768):
if mode == ExecutionMode.PHASED:
num_layers = 2
else:
num_layers = 1
config = BertConfig(task="NSP",
vocab_length=vocab_length,
num_layers=num_layers,
batch_size=1,
hidden_size=hidden_size,
sequence_length=128,
activation_type="relu",
popart_dtype="FLOAT",
no_dropout=True,
no_attn_dropout=True,
no_mask=True,
update_embedding_dict=True,
phased_execution_type="single",
execution_mode=mode,
split_qkv = (opt_type == "LAMB"))
popart_model = get_model(config, mode)
torch_model = BertForNextSentencePrediction(
TorchBertConfig(config.vocab_length, config.hidden_size,
num_hidden_layers=config.num_layers,
num_attention_heads=config.attention_heads,
intermediate_size=config.ff_size,
hidden_act=config.activation_type,
max_position_embeddings=config.max_positional_length,
layer_norm_eps=config.layer_norm_eps,
mask_tokens=config.mask_tokens,
update_embedding_dict=True,
num_labels=2))
l1_lambda = 0.1
def popart_loss_fn(outputs):
if mode == ExecutionMode.PHASED:
with popart_model.scope_provider(popart_model.builder, popart_model.nsp_scope):
loss = popart_model.builder.aiGraphcore.l1loss([outputs[0]],
l1_lambda, debugPrefix="l1LossVal",
reduction=popart.ReductionType.Sum)
else:
loss = popart_model.builder.aiGraphcore.l1loss([outputs[0]], l1_lambda,
debugPrefix="l1LossVal",
reduction=popart.ReductionType.Sum)
popart_model.builder.virtualGraph(loss, popart_model.nsp_scope.virtualGraph)
return loss
def torch_loss_fn(outputs):
return l1_lambda * torch.norm(outputs[0], 1)
bwd_graph(popart_model,
torch_model,
mode,
popart_loss_fn=popart_loss_fn,
torch_loss_fn=torch_loss_fn,
mapping=NSP_MAPPING[mode],
transform=NSP_TRANSFORM,
opt_type=opt_type)
| true | true |
f7fded9f3af8883ffa64d1e202e66ca6ad448c9d | 3,342 | py | Python | imperative/python/megengine/data/collator.py | Olalaye/MegEngine | 695d24f24517536e6544b07936d189dbc031bbce | [
"Apache-2.0"
] | 5,168 | 2020-03-19T06:10:04.000Z | 2022-03-31T11:11:54.000Z | imperative/python/megengine/data/collator.py | Olalaye/MegEngine | 695d24f24517536e6544b07936d189dbc031bbce | [
"Apache-2.0"
] | 286 | 2020-03-25T01:36:23.000Z | 2022-03-31T10:26:33.000Z | imperative/python/megengine/data/collator.py | Olalaye/MegEngine | 695d24f24517536e6544b07936d189dbc031bbce | [
"Apache-2.0"
] | 515 | 2020-03-19T06:10:05.000Z | 2022-03-30T09:15:59.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2016- Facebook, Inc (Adam Paszke)
# Copyright (c) 2014- Facebook, Inc (Soumith Chintala)
# Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert)
# Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu)
# Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu)
# Copyright (c) 2011-2013 NYU (Clement Farabet)
# Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston)
# Copyright (c) 2006 Idiap Research Institute (Samy Bengio)
# Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz)
# ---------------------------------------------------------------------
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# This file has been modified by Megvii ("Megvii Modifications").
# All Megvii Modifications are Copyright (C) 2014-2021 Megvii Inc. All rights reserved.
# ----------------------------------------------------------------------
import collections.abc
import re
import numpy as np
np_str_obj_array_pattern = re.compile(r"[aO]")
default_collate_err_msg_format = (
"default_collator: inputs must contain numpy arrays, numbers, "
"Unicode strings, bytes, dicts or lists; found {}"
)
class Collator:
r"""Used for merging a list of samples to form a mini-batch of Tensor(s). Used when using batched loading from a dataset.
Modified from https://github.com/pytorch/pytorch/blob/master/torch/utils/data/_utils/collate.py
"""
def apply(self, inputs):
elem = inputs[0]
elem_type = type(elem)
if (
elem_type.__module__ == "numpy"
and elem_type.__name__ != "str_"
and elem_type.__name__ != "string_"
):
elem = inputs[0]
if elem_type.__name__ == "ndarray":
# array of string classes and object
if np_str_obj_array_pattern.search(elem.dtype.str) is not None:
raise TypeError(default_collate_err_msg_format.format(elem.dtype))
return np.ascontiguousarray(np.stack(inputs))
elif elem.shape == (): # scalars
return np.array(inputs)
elif isinstance(elem, float):
return np.array(inputs, dtype=np.float64)
elif isinstance(elem, int):
return np.array(inputs)
elif isinstance(elem, (str, bytes)):
return inputs
elif isinstance(elem, collections.abc.Mapping):
return {key: self.apply([d[key] for d in inputs]) for key in elem}
elif isinstance(elem, tuple) and hasattr(elem, "_fields"): # namedtuple
return elem_type(*(self.apply(samples) for samples in zip(*inputs)))
elif isinstance(elem, collections.abc.Sequence):
transposed = zip(*inputs)
return [self.apply(samples) for samples in transposed]
raise TypeError(default_collate_err_msg_format.format(elem_type))
| 46.416667 | 125 | 0.637044 |
import collections.abc
import re
import numpy as np
np_str_obj_array_pattern = re.compile(r"[aO]")
default_collate_err_msg_format = (
"default_collator: inputs must contain numpy arrays, numbers, "
"Unicode strings, bytes, dicts or lists; found {}"
)
class Collator:
def apply(self, inputs):
elem = inputs[0]
elem_type = type(elem)
if (
elem_type.__module__ == "numpy"
and elem_type.__name__ != "str_"
and elem_type.__name__ != "string_"
):
elem = inputs[0]
if elem_type.__name__ == "ndarray":
if np_str_obj_array_pattern.search(elem.dtype.str) is not None:
raise TypeError(default_collate_err_msg_format.format(elem.dtype))
return np.ascontiguousarray(np.stack(inputs))
elif elem.shape == ():
return np.array(inputs)
elif isinstance(elem, float):
return np.array(inputs, dtype=np.float64)
elif isinstance(elem, int):
return np.array(inputs)
elif isinstance(elem, (str, bytes)):
return inputs
elif isinstance(elem, collections.abc.Mapping):
return {key: self.apply([d[key] for d in inputs]) for key in elem}
elif isinstance(elem, tuple) and hasattr(elem, "_fields"):
return elem_type(*(self.apply(samples) for samples in zip(*inputs)))
elif isinstance(elem, collections.abc.Sequence):
transposed = zip(*inputs)
return [self.apply(samples) for samples in transposed]
raise TypeError(default_collate_err_msg_format.format(elem_type))
| true | true |
f7fdeeb67a2fbce41bcc6ad078ed73378c5f4ef6 | 812 | py | Python | launch/stereo_frame_capture.launch.py | aussierobots/galaxy_camera_u3v | c60722042b3685caeeaaac9d98215bb3f2883a34 | [
"Apache-2.0"
] | 2 | 2021-09-04T19:40:48.000Z | 2021-09-11T09:50:51.000Z | launch/stereo_frame_capture.launch.py | aussierobots/galaxy_camera_u3v | c60722042b3685caeeaaac9d98215bb3f2883a34 | [
"Apache-2.0"
] | null | null | null | launch/stereo_frame_capture.launch.py | aussierobots/galaxy_camera_u3v | c60722042b3685caeeaaac9d98215bb3f2883a34 | [
"Apache-2.0"
] | null | null | null | """Launch uv3_image_pub stereo left & right in a component container."""
import launch
from launch_ros.actions import ComposableNodeContainer
from launch_ros.actions import LoadComposableNodes
from launch_ros.descriptions import ComposableNode
def generate_launch_description():
"""Generate launch description with multiple components."""
params=[{'image_path': '/tmp'}]
container1 = ComposableNodeContainer(
name='stereo_frame_capture',
namespace='',
package='rclcpp_components',
executable='component_container_mt',
composable_node_descriptions=[
ComposableNode(
package='galaxy_camera_u3v',
plugin='stereo_capture::StereoFrameCap',
name='stereo_frame_cap',
parameters=params
)
]
)
return launch.LaunchDescription([container1])
| 29 | 72 | 0.741379 |
import launch
from launch_ros.actions import ComposableNodeContainer
from launch_ros.actions import LoadComposableNodes
from launch_ros.descriptions import ComposableNode
def generate_launch_description():
params=[{'image_path': '/tmp'}]
container1 = ComposableNodeContainer(
name='stereo_frame_capture',
namespace='',
package='rclcpp_components',
executable='component_container_mt',
composable_node_descriptions=[
ComposableNode(
package='galaxy_camera_u3v',
plugin='stereo_capture::StereoFrameCap',
name='stereo_frame_cap',
parameters=params
)
]
)
return launch.LaunchDescription([container1])
| true | true |
f7fdeff7c15b7a324a2fb182504f08c5fbea5a73 | 4,617 | py | Python | preprocess/video_generator.py | wustone1995/speech2face | 0eadbc8caf59c58cf5320a0a131a5e6fc9e728b8 | [
"MIT"
] | null | null | null | preprocess/video_generator.py | wustone1995/speech2face | 0eadbc8caf59c58cf5320a0a131a5e6fc9e728b8 | [
"MIT"
] | 6 | 2020-09-25T22:34:18.000Z | 2022-03-12T00:17:46.000Z | preprocess/video_generator.py | wustone1995/speech2face | 0eadbc8caf59c58cf5320a0a131a5e6fc9e728b8 | [
"MIT"
] | 1 | 2021-01-02T10:18:06.000Z | 2021-01-02T10:18:06.000Z | import os
import pickle
import shutil
import imageio
import pandas as pd
import subprocess
from PIL import Image
import face_recognition
import numpy as np
import skimage
import scipy
from keras.engine import Model
from keras.layers import Input
from keras_vggface.vggface import VGGFace
from keras_vggface import utils
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
FNULL = open(os.devnull, 'w')
class VideoExtract():
def __init__(self, fps, duration, face_extraction_model, verbose):
self.destination_dir = "speech2face/preprocess/data/speaker_video_embeddings/"
self.videos = "speech2face/preprocess/data/videos/"
self.frames_dir = "speech2face/preprocess/data/frames/"
self.frame_cropped = "speech2face/preprocess/data/cropped_frames/"
self.model_dir = "speech2face/preprocess/data/pretrained_model/"
self.fps = fps
self.duration = duration
self.face_extraction_model = face_extraction_model
self.vgg = VGGFace(model='vgg16')
self.out = self.vgg.get_layer('fc7').output
self.vgg_model = Model(self.vgg.input, self.out)
self.verbose = verbose
if not os.path.isdir(self.destination_dir):
os.mkdir(self.destination_dir)
if not os.path.isdir(self.frames_dir):
os.mkdir(self.frames_dir)
def extract_video(self, id, x, y):
embeddings = np.zeros((4096))
if not os.path.isfile(self.videos + id + ".mp4"):
if self.verbose:
print("--------Video {} not found-----------".format(self.videos + id + ".mp4"))
return 1
if (not os.path.isfile(self.destination_dir + id + ".pkl")):
if self.verbose:
print("Resampling video", id)
resample = "ffmpeg -nostats -loglevel 0 -y -i {1}{2}.mp4 -r {0} -t {3} '{4}{2}.mp4'".format(self.fps, self.videos, id, self.duration, self.destination_dir)
res2 = subprocess.Popen(resample, stdout = FNULL, shell=True).communicate()
if not os.path.isfile(self.destination_dir + id + ".mp4"):
if self.verbose:
print("--------Fault in video {}--------".format(id))
return 1
extract_frames = "ffmpeg -nostats -loglevel 0 -i '{0}{1}.mp4' {2}/%02d.jpg".format(self.destination_dir, id, self.frames_dir)
rs = subprocess.Popen(extract_frames, stdout = FNULL, shell = True).communicate()
for j in range(1, 7):
if not os.path.isfile(self.frames_dir + "%02d" % j + ".jpg"):
if self.verbose:
print("------MISSING FRAME DETECTED FOR {} FRAME NO {}----".format(id, j))
continue
if self.verbose:
print("reading frame - {0}".format(j))
frame = Image.open(self.frames_dir + "%02d" % j + ".jpg")
face_boxes = face_recognition.face_locations(np.array(frame), model= self.face_extraction_model)
if(len(face_boxes) > 1):
if self.verbose:
print("-----2 faces detected in {0} frame {1}-----".format(id, j))
return 1
elif len(face_boxes) == 0:
if self.verbose:
print("-----No face detected in {} frame {}-----".format(id, j))
return 1
top, right, bottom, left = np.squeeze(face_boxes)
frame_cropped = frame.crop(box = (left, top, right, bottom))
frame_resized = np.array(Image.fromarray(np.array(frame_cropped)).resize((224,224)))
Image.fromarray(frame_resized).save(self.frame_cropped + id + '.jpg')
frame_resized = np.expand_dims(np.array(frame_resized, dtype=np.float64), 0)
frame_resized = utils.preprocess_input(frame_resized, version=1)
embeddings = self.vgg_model.predict(frame_resized)
break
pickle.dump(embeddings, open(self.destination_dir + id + ".pkl", "wb"))
delete_frames = "rm {0}*".format(self.frames_dir)
delete_video = "rm '{0}'".format(self.destination_dir + id + ".mp4")
rs = subprocess.Popen(delete_frames, stdout = subprocess.PIPE, shell = True).communicate()
rs = subprocess.Popen(delete_video, stdout = subprocess.PIPE, shell = True).communicate()
return 0
| 43.971429 | 168 | 0.568118 | import os
import pickle
import shutil
import imageio
import pandas as pd
import subprocess
from PIL import Image
import face_recognition
import numpy as np
import skimage
import scipy
from keras.engine import Model
from keras.layers import Input
from keras_vggface.vggface import VGGFace
from keras_vggface import utils
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
FNULL = open(os.devnull, 'w')
class VideoExtract():
def __init__(self, fps, duration, face_extraction_model, verbose):
self.destination_dir = "speech2face/preprocess/data/speaker_video_embeddings/"
self.videos = "speech2face/preprocess/data/videos/"
self.frames_dir = "speech2face/preprocess/data/frames/"
self.frame_cropped = "speech2face/preprocess/data/cropped_frames/"
self.model_dir = "speech2face/preprocess/data/pretrained_model/"
self.fps = fps
self.duration = duration
self.face_extraction_model = face_extraction_model
self.vgg = VGGFace(model='vgg16')
self.out = self.vgg.get_layer('fc7').output
self.vgg_model = Model(self.vgg.input, self.out)
self.verbose = verbose
if not os.path.isdir(self.destination_dir):
os.mkdir(self.destination_dir)
if not os.path.isdir(self.frames_dir):
os.mkdir(self.frames_dir)
def extract_video(self, id, x, y):
embeddings = np.zeros((4096))
if not os.path.isfile(self.videos + id + ".mp4"):
if self.verbose:
print("--------Video {} not found-----------".format(self.videos + id + ".mp4"))
return 1
if (not os.path.isfile(self.destination_dir + id + ".pkl")):
if self.verbose:
print("Resampling video", id)
resample = "ffmpeg -nostats -loglevel 0 -y -i {1}{2}.mp4 -r {0} -t {3} '{4}{2}.mp4'".format(self.fps, self.videos, id, self.duration, self.destination_dir)
res2 = subprocess.Popen(resample, stdout = FNULL, shell=True).communicate()
if not os.path.isfile(self.destination_dir + id + ".mp4"):
if self.verbose:
print("--------Fault in video {}--------".format(id))
return 1
extract_frames = "ffmpeg -nostats -loglevel 0 -i '{0}{1}.mp4' {2}/%02d.jpg".format(self.destination_dir, id, self.frames_dir)
rs = subprocess.Popen(extract_frames, stdout = FNULL, shell = True).communicate()
for j in range(1, 7):
if not os.path.isfile(self.frames_dir + "%02d" % j + ".jpg"):
if self.verbose:
print("------MISSING FRAME DETECTED FOR {} FRAME NO {}----".format(id, j))
continue
if self.verbose:
print("reading frame - {0}".format(j))
frame = Image.open(self.frames_dir + "%02d" % j + ".jpg")
face_boxes = face_recognition.face_locations(np.array(frame), model= self.face_extraction_model)
if(len(face_boxes) > 1):
if self.verbose:
print("-----2 faces detected in {0} frame {1}-----".format(id, j))
return 1
elif len(face_boxes) == 0:
if self.verbose:
print("-----No face detected in {} frame {}-----".format(id, j))
return 1
top, right, bottom, left = np.squeeze(face_boxes)
frame_cropped = frame.crop(box = (left, top, right, bottom))
frame_resized = np.array(Image.fromarray(np.array(frame_cropped)).resize((224,224)))
Image.fromarray(frame_resized).save(self.frame_cropped + id + '.jpg')
frame_resized = np.expand_dims(np.array(frame_resized, dtype=np.float64), 0)
frame_resized = utils.preprocess_input(frame_resized, version=1)
embeddings = self.vgg_model.predict(frame_resized)
break
pickle.dump(embeddings, open(self.destination_dir + id + ".pkl", "wb"))
delete_frames = "rm {0}*".format(self.frames_dir)
delete_video = "rm '{0}'".format(self.destination_dir + id + ".mp4")
rs = subprocess.Popen(delete_frames, stdout = subprocess.PIPE, shell = True).communicate()
rs = subprocess.Popen(delete_video, stdout = subprocess.PIPE, shell = True).communicate()
return 0
| true | true |
f7fdf01c194b31505661a0bd103e53ec3c595d54 | 8,894 | py | Python | src/server.py | drobnymichal/ChatServerUnixSocket | 9fb7693ef45486d5cd3255fc1eed3a42cd76a7b0 | [
"MIT"
] | null | null | null | src/server.py | drobnymichal/ChatServerUnixSocket | 9fb7693ef45486d5cd3255fc1eed3a42cd76a7b0 | [
"MIT"
] | null | null | null | src/server.py | drobnymichal/ChatServerUnixSocket | 9fb7693ef45486d5cd3255fc1eed3a42cd76a7b0 | [
"MIT"
] | null | null | null | import asyncio, socket, time
import collections
from inspect import stack
import os
from typing import AsyncIterator, Deque, List, Optional, Tuple, Union, Dict
from collections import deque
class Client:
    """One connected chat user: its streams, chosen nick and joined channels."""

    def __init__(self, writer: asyncio.StreamWriter, reader: asyncio.StreamReader) -> None:
        self.name = None  # nick; stays None until a "nick" command succeeds
        self.writer = writer
        self.reader = reader
        # Channel name ("#...") -> Channel instance the client has joined.
        self.channels: Dict[str, 'Channel'] = {}

    async def send_message(self, text: str) -> None:
        """Write one protocol line (text + " \\n") to the peer and flush."""
        # Removed dead commented-out debug print that was left here.
        self.writer.write((text + " \n").encode("utf-8"))
        await self.writer.drain()
class Message:
    """A single chat line together with the metadata needed for replay."""

    def __init__(self, text: str, timestamp: int, nick: str) -> None:
        # Author nick ("*server*" for system announcements).
        self.nick = nick
        # Creation time, whole seconds since the epoch.
        self.timestamp: int = timestamp
        # Full wire-format line delivered to clients verbatim.
        self.text = text
class Channel:
    """A named chat room: tracks its members and archives every message."""

    def __init__(self, name: str) -> None:
        self.name: str = name
        self.clients: List['Client'] = []
        # Full history, used by replay(); grows unbounded for the server's lifetime.
        self.messages: List['Message'] = []

    async def send_to_all(self, msg: Union['Message', str]) -> None:
        """Deliver *msg* to every member; Message instances are archived first."""
        if isinstance(msg, Message):
            self.messages.append(msg)
            msg = msg.text
        for client in self.clients:
            await client.send_message(msg)

    async def announce_all(self, called_client: 'Client', msg: Union['Message', str]) -> None:
        """Like send_to_all(), but skip *called_client* (the actor being announced)."""
        if isinstance(msg, Message):
            self.messages.append(msg)
            msg = msg.text
        for client in self.clients:
            if client is not called_client:
                await client.send_message(msg)

    async def replay(self, client: 'Client', timestamp: float) -> None:
        """Resend to *client* every archived message at or after *timestamp*."""
        # Removed dead commented-out debug print from this loop.
        for msg in self.messages:
            if msg.timestamp >= timestamp:
                await client.send_message(msg.text)
class Server:
    """All shared chat state plus the per-connection protocol handler.

    Wire protocol, one request per line:
        nick <name> | join <#channel> | part <#channel>
        message <#channel> <text> | replay <#channel> <timestamp>
    """

    def __init__(self) -> None:
        # Channel name ("#...") -> Channel instance, shared by every connection.
        self.all_channels: Dict[str, 'Channel'] = {}
        # Every client that ever connected; used for nick-uniqueness checks.
        # NOTE(review): clients are never removed on disconnect, so their nicks
        # stay reserved forever -- confirm whether that is intended.
        self.all_clients: List['Client'] = []

    def find_channel(self, name: str) -> Optional['Channel']:
        """Return the channel called *name*, or None when it does not exist."""
        return self.all_channels.get(name)

    def check_nick(self, nick: str) -> bool:
        """True when *nick* does not look like a channel name and is not taken."""
        if nick.startswith("#"):
            return False
        return all(client.name != nick for client in self.all_clients)

    async def handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        """Run the request/response loop for one connection until EOF."""
        client: 'Client' = Client(writer, reader)
        self.all_clients.append(client)
        try:
            got_text = (await reader.readuntil(b'\n')).decode("utf-8")[:-1]
        except asyncio.IncompleteReadError:
            return None
        while not writer.is_closing():
            request = got_text.split(maxsplit=2)
            timestamp = time.time()
            print(" ".join(request).encode("utf-8"))  # debug trace of the raw request
            if len(request) == 2:  # commands: nick, join, part
                command, param = request
                if command == "nick":
                    # check_nick() already rejects names starting with "#".
                    if not self.check_nick(param) or param.startswith("*"):
                        await client.send_message("error You cannot use this nick.")
                    elif client.name is None:
                        client.name = param
                        await client.send_message("ok Your nick has been set.")
                    else:
                        # Announce the rename once per joined channel.  The
                        # original referenced `founded_channel` (stale from a
                        # previous command) and called announce_all() once per
                        # member, duplicating the broadcast; announce_all()
                        # already skips the renaming client itself.
                        for channel in client.channels.values():
                            await channel.announce_all(client, Message(
                                f"message {channel.name} {int(timestamp)} *server* {client.name} is now known as {param}",
                                int(timestamp), "*server*"))
                        client.name = param
                        await client.send_message("ok Your nick has been changed.")
                elif client.name is None:
                    # Every command except "nick" requires a registered nick.
                    await client.send_message("error First, you have to select your nick.")
                elif command == "join":
                    founded_channel = self.find_channel(param)
                    if len(param) < 2 or not param.startswith("#") or param.startswith("*"):
                        await client.send_message("error Channel name must start with <#>.")
                    elif founded_channel is None:
                        # First member: create the channel and archive the join notice.
                        founded_channel = Channel(param)
                        self.all_channels[param] = founded_channel
                        founded_channel.clients.append(client)
                        client.channels[param] = founded_channel
                        founded_channel.messages.append(Message(
                            f"message {founded_channel.name} {int(timestamp)} *server* {client.name} has joined the channel",
                            int(timestamp), "*server*"))
                        await client.send_message("ok You have created and joined the channel.")
                    elif param not in client.channels:
                        # Membership is keyed by channel *name*.  The original
                        # compared the Channel object against the dict keys,
                        # so a re-join was never detected.
                        founded_channel.clients.append(client)
                        client.channels[param] = founded_channel
                        await client.send_message("ok You have joined the channel.")
                        await founded_channel.announce_all(client, Message(
                            f"message {founded_channel.name} {int(timestamp)} *server* {client.name} has joined the channel",
                            int(timestamp), "*server*"))
                    else:
                        await client.send_message("error You are already joined to this channel.")
                elif command == "part":
                    # Look up the client's own membership map so parting a
                    # channel the client never joined reports an error instead
                    # of raising ValueError/KeyError and killing the handler.
                    founded_channel = client.channels.get(param)
                    if founded_channel is None:
                        await client.send_message("error You are not member of this channel.")
                    else:
                        founded_channel.clients.remove(client)
                        client.channels.pop(param)
                        await client.send_message("ok You have left the channel.")
                else:
                    await client.send_message("error Unknown command.")
            elif client.name is None:
                await client.send_message("error First, you have to select your nick.")
            elif len(request) == 3:  # commands: message, replay
                command, channel, other = request
                if command == "message":
                    founded_channel = self.find_channel(channel)
                    if founded_channel is None:
                        await client.send_message("error The channel does not exist.")
                    elif channel not in client.channels:
                        await client.send_message("error You are no associated with this channel.")
                    else:
                        await founded_channel.send_to_all(Message(
                            f"message {founded_channel.name} {int(timestamp)} {client.name} {other}",
                            int(timestamp), client.name))
                elif command == "replay":
                    # Timestamp must be a non-negative integer not in the future.
                    if not other.isdigit() or int(other) > time.time():
                        await client.send_message("error Replay command is not valid - timestamp.")
                    else:
                        founded_channel = client.channels.get(channel, None)
                        if founded_channel is None:
                            await client.send_message("error Replay command is not valid - channel.")
                        else:
                            await client.send_message("ok Replay command is valid.")
                            await founded_channel.replay(client, int(other))
                else:
                    await client.send_message("error Unknown command.")
            else:
                await client.send_message("error Unknown command.")
            try:
                got_text = (await reader.readuntil(b'\n')).decode("utf-8")[:-1]
            except asyncio.IncompleteReadError:
                return None
def check_names(inp: List[str]) -> bool:
    """Return True when *inp* is non-empty and every entry is a usable, unused path.

    An entry is rejected when it is empty, whitespace-only, or already exists
    on the filesystem (the socket path would be unavailable).
    """
    if not inp:
        return False
    return all(
        name and not name.isspace() and not os.path.exists(name)
        for name in inp
    )
async def create_server(name: str):
    """Bind a fresh chat Server to the unix socket path *name* and serve forever."""
    try:
        server = await asyncio.start_unix_server(Server().handle_client, name)
        await server.serve_forever()
    except KeyboardInterrupt:
        # NOTE(review): KeyboardInterrupt is normally raised in the main
        # thread / event loop rather than inside a coroutine -- confirm this
        # handler is ever reached.
        print("ending")
        exit(1)
def main():
    """Prompt for unix-socket paths, run one chat server per path, clean up on exit."""
    user_input = input("Enter names of unix servers: ").split()
    # Re-prompt until every requested socket path is non-empty, non-blank and unused.
    while not check_names(user_input):
        print("Some of the names are already used.")
        user_input = input("Enter names of unix servers: ").split()
    # NOTE(review): asyncio.get_event_loop() outside a running loop is
    # deprecated since Python 3.10 -- confirm the target interpreter version.
    loop = asyncio.get_event_loop()
    for name in user_input:
        loop.create_task(create_server(name))
    try:
        loop.run_forever()
    finally:
        # Remove the unix socket files so the same paths can be reused next run.
        for name in user_input:
            os.remove(name)
if __name__ == "__main__":
    main()
| 42.759615 | 214 | 0.560265 | import asyncio, socket, time
import collections
from inspect import stack
import os
from typing import AsyncIterator, Deque, List, Optional, Tuple, Union, Dict
from collections import deque
class Client:
def __init__(self, writer: asyncio.StreamWriter, reader: asyncio.StreamReader) -> None:
self.name = None
self.writer = writer
self.reader = reader
self.channels: Dict[str, 'Channel'] = {}
async def send_message(self, text: str):
self.writer.write((text + " \n").encode("utf-8"))
await self.writer.drain()
class Message:
def __init__(self, text: str, timestamp: int, nick: str) -> None:
self.text = text
self.timestamp: int = timestamp
self.nick = nick
class Channel:
def __init__(self, name: str) -> None:
self.name: str = name
self.clients: List['Client'] = []
self.messages: List['Message'] = []
async def send_to_all(self, msg: Union['Message', str]) -> None:
if isinstance(msg, Message):
self.messages.append(msg)
msg = msg.text
for client in self.clients:
await client.send_message(msg)
async def announce_all(self, called_client: 'Client', msg: Union['Message', str]) -> None:
if isinstance(msg, Message):
self.messages.append(msg)
msg = msg.text
for client in self.clients:
if client is not called_client:
await client.send_message(msg)
async def replay(self, client: 'Client', timestamp: float):
for msg in self.messages:
if msg.timestamp >= timestamp:
await client.send_message(msg.text)
class Server:
def __init__(self) -> None:
self.all_channels: Dict[str, 'Channel'] = {}
self.all_clients: List['Client'] = []
def find_channel(self, name: str) -> Optional['Channel']:
for name_ch, channel in self.all_channels.items():
if name_ch == name:
return channel
return None
def check_nick(self, nick: str) -> bool:
if nick.startswith("#"):
return False
for client in self.all_clients:
if client.name == nick:
return False
return True
async def handle_client(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
client: 'Client' = Client(writer, reader)
self.all_clients.append(client)
try:
got_text = (await reader.readuntil(b'\n')).decode("utf-8")[:-1]
except asyncio.IncompleteReadError:
return None
while not writer.is_closing():
request = got_text.split(maxsplit=2)
timestamp = time.time()
print(" ".join(request).encode("utf-8"))
if len(request) == 2:
command, param = request
if command == "nick":
if not self.check_nick(param) or param.startswith("#") or param.startswith("*"):
await client.send_message("error You cannot use this nick.")
elif client.name is None:
client.name = param
await client.send_message("ok Your nick has been set.")
else:
for channel in client.channels.values():
for cl in channel.clients:
if cl is not client:
await founded_channel.announce_all(client, Message(f"message {founded_channel.name} {int(timestamp)} *server* {client.name} is now known as {param}", int(timestamp), "*server*"))
client.name = param
await client.send_message("ok Your nick has been changed.")
elif client.name is None:
await client.send_message("error First, you have to select your nick.")
elif command == "join":
founded_channel = self.find_channel(param)
if len(param) < 2 or not param.startswith("#") or param.startswith("*"):
await client.send_message("error Channel name must start with <#>.")
elif founded_channel is None:
founded_channel = Channel(param)
self.all_channels[param] = founded_channel
founded_channel.clients.append(client)
client.channels[param] = founded_channel
founded_channel.messages.append(Message(f"message {founded_channel.name} {int(timestamp)} *server* {client.name} has joined the channel", int(timestamp), "*server*"))
await client.send_message("ok You have created and joined the channel.")
elif founded_channel not in client.channels:
founded_channel.clients.append(client)
client.channels[param] = founded_channel
await client.send_message("ok You have joined the channel.")
await founded_channel.announce_all(client, Message(f"message {founded_channel.name} {int(timestamp)} *server* {client.name} has joined the channel", int(timestamp), "*server*"))
else:
await client.send_message("error You are already joined to this channel.")
elif command == "part":
founded_channel = self.find_channel(param)
if founded_channel is None:
await client.send_message("error You are not member of this channel.")
else:
founded_channel.clients.remove(client)
client.channels.pop(param)
await client.send_message("ok You have left the channel.")
else:
await client.send_message("error Unknown command.")
elif client.name is None:
await client.send_message("error First, you have to select your nick.")
elif len(request) == 3:
command, channel, other = request
if command == "message":
founded_channel = self.find_channel(channel)
if founded_channel is None:
await client.send_message("error The channel does not exist.")
elif channel not in client.channels:
await client.send_message("error You are no associated with this channel.")
else:
await founded_channel.send_to_all(Message(f"message {founded_channel.name} {int(timestamp)} {client.name} {other}", int(timestamp), client.name))
elif command == "replay":
if not other.isdigit() or int(other) > time.time():
await client.send_message("error Replay command is not valid - timestamp.")
else:
founded_channel = client.channels.get(channel, None)
if founded_channel is None:
await client.send_message("error Replay command is not valid - channel.")
else:
await client.send_message("ok Replay command is valid.")
await founded_channel.replay(client, int(other))
else:
await client.send_message("error Unknown command.")
else:
await client.send_message("error Unknown command.")
try:
got_text = (await reader.readuntil(b'\n')).decode("utf-8")[:-1]
except asyncio.IncompleteReadError:
return None
def check_names(inp: List[str]) -> bool:
if len(inp) == 0:
return False
for name in inp:
if name == "" or name.isspace() or os.path.exists(name):
return False
return True
async def create_server(name: str):
try:
sock = await asyncio.start_unix_server(Server().handle_client, name)
await sock.serve_forever()
except KeyboardInterrupt:
print("ending")
exit(1)
def main():
user_input = input("Enter names of unix servers: ").split()
while not check_names(user_input):
print("Some of the names are already used.")
user_input = input("Enter names of unix servers: ").split()
loop = asyncio.get_event_loop()
for name in user_input:
loop.create_task(create_server(name))
try:
loop.run_forever()
finally:
for name in user_input:
os.remove(name)
if __name__ == "__main__":
main()
| true | true |
f7fdf04c4e0a3593983a55cd2f8bc0a6c7ae5bd1 | 1,570 | py | Python | main/migrations/0007_auto_20190325_1437.py | SimonF24/ChironLearning | 42cb7bc8b277d0bbad712d7bc60e9acf511f4a25 | [
"Unlicense"
] | null | null | null | main/migrations/0007_auto_20190325_1437.py | SimonF24/ChironLearning | 42cb7bc8b277d0bbad712d7bc60e9acf511f4a25 | [
"Unlicense"
] | 3 | 2020-06-06T01:55:54.000Z | 2021-06-10T22:58:02.000Z | main/migrations/0007_auto_20190325_1437.py | SimonF24/ChironLearning | 42cb7bc8b277d0bbad712d7bc60e9acf511f4a25 | [
"Unlicense"
] | null | null | null | # Generated by Django 2.1.7 on 2019-03-25 21:37
import constrainedfilefield.fields.file
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import main.models
class Migration(migrations.Migration):
    """Switch ``Resource`` from external links to uploaded video files.

    Removes the link/description fields and adds a creator FK, a title and a
    size/MIME-constrained upload field; also flags users as creators.
    """

    dependencies = [
        ('main', '0006_auto_20190316_1034'),
    ]

    operations = [
        # Drop the old link-based metadata.
        migrations.RemoveField(
            model_name='resource',
            name='description',
        ),
        migrations.RemoveField(
            model_name='resource',
            name='embed_link',
        ),
        migrations.RemoveField(
            model_name='resource',
            name='normal_link',
        ),
        # Owner of the uploaded resource; default=1 presumably points at an
        # existing user so historic rows stay valid -- verify against the DB.
        migrations.AddField(
            model_name='resource',
            name='creator',
            field=models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='resource',
            name='title',
            field=models.TextField(default='', max_length=100),
        ),
        # MP4-only upload, capped at 5 GB, stored under a per-user directory.
        migrations.AddField(
            model_name='resource',
            name='upload',
            field=constrainedfilefield.fields.file.ConstrainedFileField(content_types=['video/mp4'], default='', js_checker=True, max_upload_size=5000000000, mime_lookup_length=4096, upload_to=main.models.user_directory_path),
        ),
        migrations.AddField(
            model_name='user',
            name='is_creator',
            field=models.BooleanField(default=False),
        ),
    ]
| 31.4 | 226 | 0.611465 |
import constrainedfilefield.fields.file
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import main.models
class Migration(migrations.Migration):
dependencies = [
('main', '0006_auto_20190316_1034'),
]
operations = [
migrations.RemoveField(
model_name='resource',
name='description',
),
migrations.RemoveField(
model_name='resource',
name='embed_link',
),
migrations.RemoveField(
model_name='resource',
name='normal_link',
),
migrations.AddField(
model_name='resource',
name='creator',
field=models.ForeignKey(default=1, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='resource',
name='title',
field=models.TextField(default='', max_length=100),
),
migrations.AddField(
model_name='resource',
name='upload',
field=constrainedfilefield.fields.file.ConstrainedFileField(content_types=['video/mp4'], default='', js_checker=True, max_upload_size=5000000000, mime_lookup_length=4096, upload_to=main.models.user_directory_path),
),
migrations.AddField(
model_name='user',
name='is_creator',
field=models.BooleanField(default=False),
),
]
| true | true |
f7fdf079db52e320528a5a9fbc15b538be3b2556 | 2,918 | py | Python | huaweicloud-sdk-mrs/huaweicloudsdkmrs/v1/model/create_cluster_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-mrs/huaweicloudsdkmrs/v1/model/create_cluster_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-mrs/huaweicloudsdkmrs/v1/model/create_cluster_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class CreateClusterRequest:
    """Request wrapper for the MRS create-cluster API.

    Attributes:
        openapi_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """

    sensitive_list = []

    openapi_types = {'body': 'CreateClusterReq'}

    attribute_map = {'body': 'body'}

    def __init__(self, body=None):
        """CreateClusterRequest - a model defined in huaweicloud sdk"""
        self._body = None
        self.discriminator = None
        if body is not None:
            self.body = body

    @property
    def body(self):
        """Gets the body of this CreateClusterRequest.

        :return: The body of this CreateClusterRequest.
        :rtype: CreateClusterReq
        """
        return self._body

    @body.setter
    def body(self, body):
        """Sets the body of this CreateClusterRequest.

        :param body: The body of this CreateClusterRequest.
        :type: CreateClusterReq
        """
        self._body = body

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                # Sensitive attributes are masked in the serialized form.
                result[attr] = "****" if attr in self.sensitive_list else value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, CreateClusterRequest) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 25.823009 | 79 | 0.540439 |
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class CreateClusterRequest:
sensitive_list = []
openapi_types = {
'body': 'CreateClusterReq'
}
attribute_map = {
'body': 'body'
}
def __init__(self, body=None):
self._body = None
self.discriminator = None
if body is not None:
self.body = body
@property
def body(self):
return self._body
@body.setter
def body(self, body):
self._body = body
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, CreateClusterRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f7fdf0d3ecb1fad427f7ad0d8c52af380cf83871 | 396 | py | Python | top/api/rest/TmcMessagesConfirmRequest.py | looio/jst | 9d8e5bb7018ad8eeef36b233cd5e076106078f80 | [
"MIT"
] | null | null | null | top/api/rest/TmcMessagesConfirmRequest.py | looio/jst | 9d8e5bb7018ad8eeef36b233cd5e076106078f80 | [
"MIT"
] | null | null | null | top/api/rest/TmcMessagesConfirmRequest.py | looio/jst | 9d8e5bb7018ad8eeef36b233cd5e076106078f80 | [
"MIT"
] | null | null | null | '''
Created by auto_sdk on 2015.12.17
'''
from top.api.base import RestApi
class TmcMessagesConfirmRequest(RestApi):
    """Request object for the ``taobao.tmc.messages.confirm`` TOP API."""

    def __init__(self, domain='gw.api.taobao.com', port=80):
        RestApi.__init__(self, domain, port)
        # Request parameters; populate before executing the call.
        self.s_message_ids = None
        self.f_message_ids = None
        self.group_name = None

    def getapiname(self):
        """API method name used for routing on the TOP gateway."""
        return 'taobao.tmc.messages.confirm'
| 24.75 | 60 | 0.676768 | from top.api.base import RestApi
class TmcMessagesConfirmRequest(RestApi):
def __init__(self, domain='gw.api.taobao.com', port=80):
RestApi.__init__(self, domain, port)
self.f_message_ids = None
self.group_name = None
self.s_message_ids = None
def getapiname(self):
return 'taobao.tmc.messages.confirm'
| true | true |
f7fdf1b055d9e0a963c8a8f1f112c0a42f9881aa | 4,963 | py | Python | app/users/views.py | jayjodev/oncollegehub | 5633df8beaef232d58025c4407bd9e25bd349e49 | [
"MIT"
] | 2 | 2018-11-14T17:08:05.000Z | 2018-11-14T17:08:38.000Z | app/users/views.py | jayjodev/oncollegehub | 5633df8beaef232d58025c4407bd9e25bd349e49 | [
"MIT"
] | 16 | 2020-01-11T04:09:50.000Z | 2022-03-12T00:11:19.000Z | app/users/views.py | jayjodev/oncollegehub | 5633df8beaef232d58025c4407bd9e25bd349e49 | [
"MIT"
] | 2 | 2018-11-14T17:08:07.000Z | 2018-11-28T21:38:16.000Z | from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.http import HttpResponseRedirect
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from .forms import UserRegisterForm, UserUpdateForm, ProfileUpdateForm
# from django.contrib.auth.models import User
from .models import Student
from blog.views import get_college_ranking, get_student_ranking
from django.db import connection
from django.views.generic import (
DetailView,
CreateView,
UpdateView,
ListView,
)
class UserRegistration(CreateView):
    """Sign-up view; already-authenticated users are bounced to the blog home."""

    model = Student
    form_class = UserRegisterForm

    def dispatch(self, request, *args, **kwargs):
        # Registration only makes sense for anonymous visitors.
        if self.request.user.is_authenticated:
            return redirect('blog-home')
        return super().dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['registration_form'] = UserRegisterForm()
        # Sidebar ranking widgets shared across pages.
        get_college_ranking(context)
        get_student_ranking(context)
        return context

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if form.is_valid():
            form.save()
            return redirect('login')
        # Invalid submission: redisplay the bound form with its errors.
        messages.error(request, "There are some errors with your registration, please check below: ")
        return render(request, 'users/register.html', {'registration_form': form})
@method_decorator(login_required, name='dispatch')
class UserProfile(DetailView):
    """Public profile page with follow/unfollow POST actions."""

    model = Student
    context_object_name = 'user_object'

    def post(self, request, *args, **kwargs):
        # The profile being viewed and the profile of the acting viewer.
        target = self.get_object()
        viewer_profile = self.request.user.profile
        if request.POST.get('follow'):
            target.profile.follower.add(viewer_profile)
            viewer_profile.following.add(target.profile)
            viewer_profile.save()
            target.save()
        elif request.POST.get('unfollow'):
            target.profile.follower.remove(viewer_profile)
            viewer_profile.following.remove(target.profile)
            target.save()
            viewer_profile.save()
        return HttpResponseRedirect(target.profile.get_absolute_url())

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(**kwargs)
        profile = self.get_object().profile
        context['following'] = profile.following.all()
        context['followers'] = profile.follower.all()
        get_college_ranking(context)
        get_student_ranking(context)
        return context
class UserUpdateProfile(UserPassesTestMixin, UpdateView):
    """Let a user edit their own name and profile; everyone else is rejected."""

    model = Student
    user_details_form = UserUpdateForm
    context_object_name = 'user_object'
    fields = ['first_name', 'last_name']
    success_url = '/'

    def post(self, request, *args, **kwargs):
        # Let UpdateView save the user-detail fields first.  super().post is
        # already bound, so `self` must not be passed again: the original call
        # shifted every argument by one, handing `self` in as the request.
        super().post(request, *args, **kwargs)
        # Then save the companion profile form (avatar etc.) for the same user.
        p_form = ProfileUpdateForm(self.request.POST, self.request.FILES, instance=self.request.user.profile)
        if p_form.is_valid():
            p_form.save()
        return redirect(f"/users/{self.kwargs.get('pk')}/{self.kwargs.get('username')}")

    def get_context_data(self, **kwargs):
        data = super().get_context_data(**kwargs)
        data['relevant_post'] = None
        data['p_form'] = ProfileUpdateForm(instance=self.request.user.profile)
        get_college_ranking(data)
        get_student_ranking(data)
        return data

    def test_func(self):
        # Only the profile owner may edit it.
        return self.request.user == self.get_object()
class UserDetailView(DetailView):
    """Read-only detail page for a single student."""
    model = Student
    template_name = 'users/user_detail.html'
class UserProfileFollowing(ListView):
    """List every profile that the student identified by ``pk`` follows."""

    model = Student
    template_name = 'users/users_following.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        # NOTE(review): .first() yields None for an unknown pk, which would
        # raise below -- presumably the URLconf guarantees a valid pk.
        target = Student.objects.filter(id=self.kwargs['pk']).first()
        context['following_list'] = target.profile.following.all()
        get_college_ranking(context)
        get_student_ranking(context)
        return context
class UserProfileFollowers(ListView):
    """List every profile following the student identified by ``pk``."""

    model = Student
    template_name = 'users/user_followers.html'

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super().get_context_data(**kwargs)
        # NOTE(review): .first() yields None for an unknown pk, which would
        # raise below -- presumably the URLconf guarantees a valid pk.
        target = Student.objects.filter(id=self.kwargs['pk']).first()
        context['followers_list'] = target.profile.follower.all()
        get_college_ranking(context)
        get_student_ranking(context)
        return context
| 34.706294 | 109 | 0.683055 | from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.http import HttpResponseRedirect
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from .forms import UserRegisterForm, UserUpdateForm, ProfileUpdateForm
from .models import Student
from blog.views import get_college_ranking, get_student_ranking
from django.db import connection
from django.views.generic import (
DetailView,
CreateView,
UpdateView,
ListView,
)
class UserRegistration(CreateView):
model = Student
form_class = UserRegisterForm
def dispatch(self, request, *args, **kwargs):
if self.request.user.is_authenticated:
return redirect('blog-home')
return super().dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['registration_form'] = UserRegisterForm()
get_college_ranking(context)
get_student_ranking(context)
return context
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
if form.is_valid():
form.save()
return redirect('login')
else:
messages.error(request, "There are some errors with your registration, please check below: ")
return render(request, 'users/register.html', {'registration_form': form})
@method_decorator(login_required, name='dispatch')
class UserProfile(DetailView):
model = Student
context_object_name = 'user_object'
def post(self, request, *args, **kwargs):
user = self.get_object()
user_following = self.request.user.profile
if request.POST.get('follow'):
user.profile.follower.add(user_following)
user_following.following.add(user.profile)
user_following.save()
user.save()
elif request.POST.get('unfollow'):
user.profile.follower.remove(user_following)
user_following.following.remove(user.profile)
user.save()
user_following.save()
return HttpResponseRedirect(user.profile.get_absolute_url())
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(**kwargs)
following = self.get_object().profile.following.all()
followers = self.get_object().profile.follower.all()
context['following'] = following
context['followers'] = followers
get_college_ranking(context)
get_student_ranking(context)
return context
class UserUpdateProfile(UserPassesTestMixin, UpdateView):
model = Student
user_details_form = UserUpdateForm
context_object_name = 'user_object'
fields = ['first_name', 'last_name']
success_url = '/'
def post(self, request, *args, **kwargs):
super().post(self, request, *args, **kwargs)
p_form = ProfileUpdateForm(self.request.POST, self.request.FILES, instance=self.request.user.profile)
if p_form.is_valid():
p_form.save()
return redirect(f"/users/{self.kwargs.get('pk')}/{self.kwargs.get('username')}")
def get_context_data(self, **kwargs):
data = super().get_context_data(**kwargs)
p_form = ProfileUpdateForm(instance=self.request.user.profile)
data['relevant_post'] = None
data['p_form'] = p_form
get_college_ranking(data)
get_student_ranking(data)
return data
def test_func(self):
user = self.get_object()
return False if self.request.user != user else True
class UserDetailView(DetailView):
model = Student
template_name = 'users/user_detail.html'
class UserProfileFollowing(ListView):
model = Student
template_name = 'users/users_following.html'
def get_context_data(self, *, object_list=None, **kwargs):
context = super().get_context_data(**kwargs)
user = Student.objects.filter(id=self.kwargs['pk']).first()
following = user.profile.following.all()
context['following_list'] = following
get_college_ranking(context)
get_student_ranking(context)
return context
class UserProfileFollowers(ListView):
model = Student
template_name = 'users/user_followers.html'
def get_context_data(self, *, object_list=None, **kwargs):
context = super().get_context_data(**kwargs)
user = Student.objects.filter(id=self.kwargs['pk']).first()
followers = user.profile.follower.all()
context['followers_list'] = followers
get_college_ranking(context)
get_student_ranking(context)
return context
| true | true |
f7fdf2460dad1af95cb06a8fdbfb287af7d94c68 | 959 | py | Python | lend/models.py | harshitanand/Libmgmt | a2a7537b07dd960b620198708714f036345b2395 | [
"MIT"
] | 1 | 2015-04-28T08:40:00.000Z | 2015-04-28T08:40:00.000Z | lend/models.py | harshitanand/Libmgmt | a2a7537b07dd960b620198708714f036345b2395 | [
"MIT"
] | null | null | null | lend/models.py | harshitanand/Libmgmt | a2a7537b07dd960b620198708714f036345b2395 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class Books(models.Model):
    """A library book title and how many copies the library owns."""

    name = models.CharField(max_length=255, blank=False, unique=True)
    author = models.CharField(max_length=255, blank=False)
    # Number of copies the library holds.
    copies = models.IntegerField(blank=False, default=1)
    # Availability flag -- presumably True while a copy can be issued; the
    # code maintaining it is not visible in this file, verify in the views.
    status = models.BooleanField(blank=False, default=True)

    def __str__(self):
        return self.name
class Issuebook(models.Model):
    """Per-user lending record, linking user, book and group by name."""

    username = models.CharField(max_length=255, blank=False)
    bookname = models.CharField(max_length=255, blank=False)
    groupname = models.CharField(max_length=255, blank=False)
    # Count of books currently issued to this user.
    issued = models.IntegerField(default=0)
    # Remaining quota -- presumably derived from the user's group; verify
    # against the view code that populates it.
    can_issue = models.IntegerField(blank=True)

    def __str__(self):
        return self.username
class usergroups(models.Model):
    """Maps each user to the lending group that determines their privileges."""
    # NOTE(review): class name breaks the PascalCase convention; renaming a
    # model would require a migration, so it is documented rather than changed.

    username = models.CharField(max_length=255, unique=True, blank=False)
    group = models.CharField(max_length=120, blank=False)

    def __str__(self):
        return self.username
| 31.966667 | 73 | 0.726799 | from django.db import models
class Books(models.Model):
name = models.CharField(max_length=255, blank=False, unique=True)
author = models.CharField(max_length=255, blank=False)
copies = models.IntegerField(blank=False, default=1)
status = models.BooleanField(blank=False, default=True)
def __str__(self):
return self.name
class Issuebook(models.Model):
username = models.CharField(max_length=255, blank=False)
bookname = models.CharField(max_length=255, blank=False)
groupname = models.CharField(max_length=255, blank=False)
issued = models.IntegerField(default=0)
can_issue = models.IntegerField(blank=True)
def __str__(self):
return self.username
class usergroups(models.Model):
username = models.CharField(max_length=255, unique=True, blank=False)
group = models.CharField(max_length=120, blank=False)
def __str__(self):
return self.username
| true | true |
f7fdf34cb999122cc1a1bd62fe452838b0e311c7 | 2,475 | py | Python | IoT_ShadowApplications/my_app/UsefulData.py | ertis-research/reliable-iot | c7a1f6bb69099797e2136522dbdda94c2e6a4895 | [
"MIT"
] | 1 | 2019-04-26T10:28:57.000Z | 2019-04-26T10:28:57.000Z | IoT_ShadowApplications/my_app/UsefulData.py | ertis-research/reliable-iot | c7a1f6bb69099797e2136522dbdda94c2e6a4895 | [
"MIT"
] | null | null | null | IoT_ShadowApplications/my_app/UsefulData.py | ertis-research/reliable-iot | c7a1f6bb69099797e2136522dbdda94c2e6a4895 | [
"MIT"
] | 1 | 2019-04-26T10:29:35.000Z | 2019-04-26T10:29:35.000Z | """This is a singleton class to store the component token database urls and kafka Producer & Consumer"""
from kafka import KafkaAdminClient, KafkaProducer
import requests
import json
class URL:
    """Service endpoints used by this component."""

    # DB_URL = 'http://127.0.0.1:8084/'  # on local for tests
    DB_URL = 'http://mongoapi:80/'  # on docker swarm
class Token:
    """Singleton holding the component token fetched from the DB service."""

    __instance = None

    @staticmethod
    def get_instance():
        """Return the shared Token, creating it on first access."""
        if Token.__instance is None:
            Token()
        return Token.__instance

    def __init__(self):
        """Virtually private constructor — use :meth:`get_instance`."""
        if Token.__instance is not None:
            raise Exception("This class is a singleton!")

        # Ask the DB service to mint a COMPONENT token, then fetch its value.
        create_resp = requests.post(url=URL.DB_URL + 'generateToken/',
                                    data={'type': 'COMPONENT'})
        token_id = json.loads(create_resp.text)['token']  # id of the new token

        lookup_resp = requests.get(url=URL.DB_URL + 'getTokenById/{}/'.format(token_id))
        self.token = json.loads(lookup_resp.text)['token']

        Token.__instance = self
class KfkProducer:
    """Singleton wrapper around the shared KafkaProducer instance."""

    __instance = None

    @staticmethod
    def get_instance():
        """Return the shared producer, creating it on first access."""
        if KfkProducer.__instance is None:
            KfkProducer()
        return KfkProducer.__instance

    def __init__(self):
        """Virtually private constructor — use :meth:`get_instance`."""
        if KfkProducer.__instance is not None:
            raise Exception("This class is a singleton!")

        producer = KafkaProducer(
            bootstrap_servers=['kafka1:9092', 'kafka2:9092'],
            client_id='iot_shadow_applications',
            # Messages are JSON-encoded before being sent.
            value_serializer=lambda v: json.dumps(v).encode('utf-8')
        )
        KfkProducer.__instance = producer
class KfkAdminClient:
    """Singleton wrapper around the shared KafkaAdminClient instance."""

    __instance = None

    @staticmethod
    def get_instance():
        """Return the shared admin client, creating it on first access."""
        if KfkAdminClient.__instance is None:
            KfkAdminClient()
        return KfkAdminClient.__instance

    def __init__(self):
        """Virtually private constructor — use :meth:`get_instance`."""
        if KfkAdminClient.__instance is not None:
            raise Exception("This class is a singleton!")

        admin = KafkaAdminClient(
            bootstrap_servers=['kafka1:9092', 'kafka2:9092'],
            client_id='iot_shadow_applications',
        )
        KfkAdminClient.__instance = admin
| 29.464286 | 104 | 0.574545 |
from kafka import KafkaAdminClient, KafkaProducer
import requests
import json
class URL:
//mongoapi:80/'
class Token:
__instance = None
@staticmethod
def get_instance():
if not Token.__instance:
Token()
return Token.__instance
def __init__(self):
if Token.__instance:
raise Exception("This class is a singleton!")
else:
req = requests.post(url=URL.DB_URL+'generateToken/',
data={'type': 'COMPONENT'})
token_id = json.loads(req.text)['token']
req_token = requests.get(url=URL.DB_URL+'getTokenById/{}/'.format(token_id))
token = json.loads(req_token.text)['token']
self.token = token
Token.__instance = self
class KfkProducer:
__instance = None
@staticmethod
def get_instance():
if not KfkProducer.__instance:
KfkProducer()
return KfkProducer.__instance
def __init__(self):
if KfkProducer.__instance:
raise Exception("This class is a singleton!")
else:
KfkProducer.__instance = \
KafkaProducer(bootstrap_servers=['kafka1:9092', 'kafka2:9092'],
client_id='iot_shadow_applications',
value_serializer=lambda v: json.dumps(v).encode('utf-8')
)
class KfkAdminClient:
__instance = None
@staticmethod
def get_instance():
if not KfkAdminClient.__instance:
KfkAdminClient()
return KfkAdminClient.__instance
def __init__(self):
if KfkAdminClient.__instance:
raise Exception("This class is a singleton!")
else:
KfkAdminClient.__instance =\
KafkaAdminClient(bootstrap_servers=['kafka1:9092', 'kafka2:9092'],
client_id='iot_shadow_applications',
)
| true | true |
f7fdf400ff2f46e396f4224c552a0ad3c77feaf3 | 18,953 | py | Python | mpf/platforms/system11.py | garimahc15/mpf | 62acdd4110fa6bc7ac2d97ad4216fdc60076d001 | [
"MIT"
] | null | null | null | mpf/platforms/system11.py | garimahc15/mpf | 62acdd4110fa6bc7ac2d97ad4216fdc60076d001 | [
"MIT"
] | null | null | null | mpf/platforms/system11.py | garimahc15/mpf | 62acdd4110fa6bc7ac2d97ad4216fdc60076d001 | [
"MIT"
] | null | null | null | """A generic system11 driver overlay.
This is based on the Snux platform to generically support all kinds of System11 platforms.
"""
import asyncio
from typing import Any, Optional, Set, Tuple, Dict
from mpf.core.machine import MachineController
from mpf.core.platform import DriverPlatform, DriverConfig
from mpf.platforms.interfaces.driver_platform_interface import DriverPlatformInterface, PulseSettings, HoldSettings
from mpf.core.delays import DelayManager
# pylint: disable-msg=too-many-instance-attributes
class System11OverlayPlatform(DriverPlatform):

    """Overlay platform to drive system11 machines using a WPC controller.

    System11 hardware multiplexes one bank of driver outputs between two
    physical banks ("A side" and "C side") via a select relay.  This overlay
    queues driver actions per side and toggles the relay, waiting out the
    configured relay transition delay before firing the newly selected side.
    """

    __slots__ = ["delay", "platform", "system11_config", "a_side_queue", "c_side_queue",
                 "a_side_done_time", "c_side_done_time", "drivers_holding_a_side", "drivers_holding_c_side",
                 "a_side_enabled", "c_side_enabled", "ac_relay_in_transition", "prefer_a_side", "drivers"]

    def __init__(self, machine: MachineController) -> None:
        """Initialise the board."""
        super().__init__(machine)

        self.delay = DelayManager(machine)

        # The real (parent) platform that drives the hardware.
        self.platform = None                    # type: DriverPlatform

        self.system11_config = None             # type: Any

        # Pending (driver, pulse, hold) actions for each side.
        self.a_side_queue = set()               # type: Set[Tuple[DriverPlatformInterface, PulseSettings, HoldSettings]]
        self.c_side_queue = set()               # type: Set[Tuple[DriverPlatformInterface, PulseSettings, HoldSettings]]

        # Clock time until which a pulse keeps the side busy.
        self.a_side_done_time = 0
        self.c_side_done_time = 0

        # Drivers currently enabled (held) on each side.
        self.drivers_holding_a_side = set()     # type: Set[DriverPlatformInterface]
        self.drivers_holding_c_side = set()     # type: Set[DriverPlatformInterface]

        self.a_side_enabled = True
        self.c_side_enabled = False

        # Cache of real drivers keyed by base number (without the A/C suffix).
        self.drivers = {}                       # type: Dict[str, DriverPlatformInterface]

        self.ac_relay_in_transition = False
        # Specify whether the AC relay should favour the A or C side when at rest.
        # Typically during a game the 'C' side should be preferred, since that is
        # normally where the flashers are which need a quick response without having to wait on the relay.
        # At game over though, it should prefer the 'A' side so that the relay isn't permanently energised.
        self.prefer_a_side = True

    def stop(self):
        """Stop the overlay. Nothing to do here because stop is also called on parent platform."""

    @property
    def a_side_busy(self):
        """Return if A side cannot be switches off right away."""
        return self.drivers_holding_a_side or self.a_side_done_time > self.machine.clock.get_time() or self.a_side_queue

    @property
    def c_side_active(self):
        """Return if C side cannot be switches off right away."""
        return self.drivers_holding_c_side or self.c_side_done_time > self.machine.clock.get_time()

    @property
    def c_side_busy(self):
        """Return if C side cannot be switches off right away."""
        return self.drivers_holding_c_side or self.c_side_done_time > self.machine.clock.get_time() or self.c_side_queue

    @property
    def a_side_active(self):
        """Return if A side cannot be switches off right away."""
        return self.drivers_holding_a_side or self.a_side_done_time > self.machine.clock.get_time()

    def _null_log_handler(self, *args, **kwargs):
        pass

    @asyncio.coroutine
    # NOTE(review): @asyncio.coroutine is deprecated and removed in
    # Python 3.11 — migrating to ``async def`` would require touching
    # callers, so it is only flagged here.
    def initialize(self):
        """Automatically called by the Platform class after all the core modules are loaded."""
        # load coil platform
        self.platform = self.machine.get_platform_sections(
            "platform", getattr(self.machine.config.get('system11', {}), 'platform', None))

        # we have to wait for coils to be initialized
        self.machine.events.add_handler("init_phase_1", self._initialize)

    def _initialize(self, **kwargs):
        """Validate config, set up logging and register the prefer-side event handlers."""
        del kwargs
        self._validate_config()

        self.configure_logging('Platform.System11', self.system11_config['console_log'],
                               self.system11_config['file_log'])

        self.log.debug("Configuring A/C Select Relay for driver %s",
                       self.system11_config['ac_relay_driver'].name)

        self.system11_config['ac_relay_driver'].get_and_verify_hold_power(1.0)

        self.log.debug("Configuring A/C Select Relay transition delay for "
                       "%sms", self.system11_config['ac_relay_delay_ms'])

        self.machine.events.add_handler(self.system11_config['prefer_a_side_event'], self._prefer_a_side)
        self.log.info("Configuring System11 driver to prefer A side on event %s",
                      self.system11_config['prefer_a_side_event'])

        self.machine.events.add_handler(self.system11_config['prefer_c_side_event'], self._prefer_c_side)
        self.log.info("Configuring System11 driver to prefer C side on event %s",
                      self.system11_config['prefer_c_side_event'])

    def _prefer_a_side(self, **kwargs):
        """Event handler: switch the resting preference to the A side."""
        del kwargs
        self.prefer_a_side = True
        self._enable_a_side()

    def _prefer_c_side(self, **kwargs):
        """Event handler: switch the resting preference to the C side."""
        del kwargs
        self.prefer_a_side = False
        self._enable_c_side()

    def _validate_config(self):
        # Runs the raw 'system11' machine config through the config validator.
        self.system11_config = self.machine.config_validator.validate_config(
            'system11', self.machine.config.get('system11', {}))

    def tick(self):
        """System11 main loop.

        Called based on the timer_tick event.

        Services the queue of the preferred side first, then the other side,
        and finally lets the relay fall back to the preferred side once the
        non-preferred side is idle.
        """
        if self.prefer_a_side:
            if self.a_side_queue:
                self._service_a_side()
            elif self.c_side_queue:
                self._service_c_side()
            elif self.c_side_enabled and not self.c_side_active:
                self._enable_a_side()
        else:
            if self.c_side_queue:
                self._service_c_side()
            elif self.a_side_queue:
                self._service_a_side()
            elif self.a_side_enabled and not self.a_side_active:
                self._enable_c_side()

    def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict):
        """Configure a driver on the system11 overlay.

        Drivers whose number ends in 'a'/'c' are wrapped in a
        :class:`System11Driver` for the corresponding relay side; both sides
        share one underlying driver on the parent platform.  All other
        numbers are passed straight through to the parent platform.

        Args:
            config: Driver config dict
        """
        orig_number = number
        if number and (number.lower().endswith('a') or number.lower().endswith('c')):

            side = number[-1:].upper()
            number = number[:-1]

            # only configure driver once
            if number not in self.drivers:
                self.drivers[number] = self.platform.configure_driver(config, number, platform_settings)

            system11_driver = System11Driver(orig_number, self.drivers[number], self, side)

            return system11_driver

        return self.platform.configure_driver(config, number, platform_settings)

    def set_pulse_on_hit_and_release_rule(self, enable_switch, coil):
        """Configure a rule for a driver on the system11 overlay.

        Will pass the call onto the parent platform if the driver is not on A/C relay.
        """
        if coil.hw_driver in self.drivers.values():
            raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
                                 "This is not supported.".format(coil))

        self.platform.set_pulse_on_hit_and_release_rule(enable_switch, coil)

    def set_pulse_on_hit_and_enable_and_release_rule(self, enable_switch, coil):
        """Configure a rule for a driver on the system11 overlay.

        Will pass the call onto the parent platform if the driver is not on A/C relay.
        """
        if coil.hw_driver in self.drivers.values():
            raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
                                 "This is not supported.".format(coil))

        self.platform.set_pulse_on_hit_and_enable_and_release_rule(enable_switch, coil)

    def set_pulse_on_hit_and_enable_and_release_and_disable_rule(self, enable_switch, disable_switch, coil):
        """Configure a rule for a driver on the system11 overlay.

        Will pass the call onto the parent platform if the driver is not on A/C relay.
        """
        if coil.hw_driver in self.drivers.values():
            raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
                                 "This is not supported.".format(coil))

        self.platform.set_pulse_on_hit_and_enable_and_release_and_disable_rule(enable_switch, disable_switch, coil)

    def set_pulse_on_hit_rule(self, enable_switch, coil):
        """Configure a rule on the system11 overlay.

        Will pass the call onto the parent platform if the driver is not on A/C relay.
        """
        if coil.hw_driver in self.drivers.values():
            raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
                                 "This is not supported.".format(coil))

        self.platform.set_pulse_on_hit_rule(enable_switch, coil)

    def clear_hw_rule(self, switch, coil):
        """Clear a rule for a driver on the system11 overlay."""
        if coil.hw_driver in self.drivers.values():
            raise AssertionError("Received a request to clear a hardware rule for a System11 driver {}. "
                                 "This is not supported.".format(coil))

        self.platform.clear_hw_rule(switch, coil)

    def driver_action(self, driver, pulse_settings: Optional[PulseSettings], hold_settings: Optional[HoldSettings],
                      side: str):
        """Add a driver action for a switched driver to the queue (for either the A-side or C-side queue).

        Args:
            driver: A reference to the original platform class Driver instance.
            pulse_settings: Settings for the pulse or None
            hold_settings:Settings for hold or None

        This action will be serviced immediately if it can, or ASAP otherwise.
        """
        if self.prefer_a_side:
            if side == "A":
                self.a_side_queue.add((driver, pulse_settings, hold_settings))
                self._service_a_side()
            elif side == "C":
                self.c_side_queue.add((driver, pulse_settings, hold_settings))
                # Non-preferred side must wait until the relay is settled and
                # the preferred side is idle.
                if not self.ac_relay_in_transition and not self.a_side_busy:
                    self._service_c_side()
            else:
                raise AssertionError("Invalid side {}".format(side))
        else:
            if side == "C":
                self.c_side_queue.add((driver, pulse_settings, hold_settings))
                self._service_c_side()
            elif side == "A":
                self.a_side_queue.add((driver, pulse_settings, hold_settings))
                if not self.ac_relay_in_transition and not self.c_side_busy:
                    self._service_a_side()
            else:
                raise AssertionError("Invalid side {}".format(side))

    def _enable_ac_relay(self):
        # Energise the relay (switches to C side) and block both sides until
        # the configured transition delay has elapsed.
        self.system11_config['ac_relay_driver'].enable()
        self.ac_relay_in_transition = True
        self.a_side_enabled = False
        self.c_side_enabled = False
        self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
                       callback=self._c_side_enabled,
                       name='enable_ac_relay')

    def _disable_ac_relay(self):
        # De-energise the relay (switches to A side) and block both sides
        # until the configured transition delay has elapsed.
        self.system11_config['ac_relay_driver'].disable()
        self.ac_relay_in_transition = True
        self.a_side_enabled = False
        self.c_side_enabled = False
        self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
                       callback=self._a_side_enabled,
                       name='disable_ac_relay')

    # -------------------------------- A SIDE ---------------------------------

    def _enable_a_side(self):
        # Request the relay to switch to the A side, first draining any C-side
        # holds and retrying after the relay delay if needed.
        if self.prefer_a_side:
            if not self.a_side_enabled and not self.ac_relay_in_transition:

                if self.c_side_active:
                    self._disable_all_c_side_drivers()
                    self._disable_ac_relay()
                    self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
                                   callback=self._enable_a_side,
                                   name='enable_a_side')
                    return

                if self.c_side_enabled:
                    self._disable_ac_relay()

                else:
                    self._a_side_enabled()
        else:
            if (not self.ac_relay_in_transition and
                    not self.a_side_enabled and
                    not self.c_side_busy):
                self._disable_ac_relay()

            elif self.a_side_enabled and self.a_side_queue:
                self._service_a_side()

    def _a_side_enabled(self):
        # Relay transition finished with the A side selected.
        self.ac_relay_in_transition = False
        if self.prefer_a_side:
            self.a_side_enabled = True
            self.c_side_enabled = False
            self._service_a_side()
        else:
            # If C-side work arrived during the transition, bounce back.
            if self.c_side_queue:
                self._enable_c_side()
                return

            self.c_side_enabled = False
            self.a_side_enabled = True
            self._service_a_side()

    def _service_a_side(self):
        # Fire all queued A-side actions; switches the relay first if needed.
        if not self.a_side_queue:
            return

        if not self.a_side_enabled:
            self._enable_a_side()
            return

        while self.a_side_queue:
            driver, pulse_settings, hold_settings = self.a_side_queue.pop()

            if hold_settings is None and pulse_settings:
                driver.pulse(pulse_settings)
                # Keep the side marked busy until the longest pulse has ended.
                self.a_side_done_time = max(self.a_side_done_time,
                                            self.machine.clock.get_time() + (pulse_settings.duration / 1000.0))

            elif hold_settings and pulse_settings:
                driver.enable(pulse_settings, hold_settings)
                self.drivers_holding_a_side.add(driver)

            else:   # ms == 0
                driver.disable()
                try:
                    self.drivers_holding_a_side.remove(driver)
                except KeyError:
                    pass

    # -------------------------------- C SIDE ---------------------------------

    def _enable_c_side(self):
        # Request the relay to switch to the C side, first draining any A-side
        # holds and retrying after the relay delay if needed.
        if self.prefer_a_side:
            if not self.c_side_enabled and not self.ac_relay_in_transition:

                if self.a_side_active:
                    self._disable_all_a_side_drivers()
                    self._enable_ac_relay()
                    self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
                                   callback=self._enable_c_side,
                                   name='enable_c_side')
                    return

                if self.a_side_enabled:
                    self._enable_ac_relay()

                else:
                    self._c_side_enabled()
        else:
            if (not self.ac_relay_in_transition and
                    not self.c_side_enabled and
                    not self.a_side_busy):
                self._enable_ac_relay()

            elif self.c_side_enabled and self.c_side_queue:
                self._service_c_side()

    def _c_side_enabled(self):
        # Relay transition finished with the C side selected.
        self.ac_relay_in_transition = False

        if self.prefer_a_side:
            self.c_side_enabled = True
            self.a_side_enabled = False
            self._service_c_side()

        else:
            # If A-side work arrived during the transition, bounce back.
            if self.a_side_queue:
                self._enable_a_side()
                return

            self.a_side_enabled = False
            self.c_side_enabled = True
            self._service_c_side()

    def _service_c_side(self):
        # Fire all queued C-side actions; the A side always wins a conflict,
        # so bail out while the relay is moving or the A side is busy.
        if not self.c_side_queue:
            return

        if self.ac_relay_in_transition or self.a_side_busy:
            return

        if not self.c_side_enabled:
            self._enable_c_side()
            return

        while self.c_side_queue:
            driver, pulse_settings, hold_settings = self.c_side_queue.pop()

            if hold_settings is None and pulse_settings:
                driver.pulse(pulse_settings)
                # Keep the side marked busy until the longest pulse has ended.
                self.c_side_done_time = max(self.c_side_done_time,
                                            self.machine.clock.get_time() + (pulse_settings.duration / 1000.0))
            elif hold_settings and pulse_settings:
                driver.enable(pulse_settings, hold_settings)
                self.drivers_holding_c_side.add(driver)
            else:
                driver.disable()
                try:
                    self.drivers_holding_c_side.remove(driver)
                except KeyError:
                    pass

    def _disable_all_c_side_drivers(self):
        # Force-release every held C-side driver and reset the side's state.
        if self.c_side_active:
            for driver in self.drivers_holding_c_side:
                driver.disable()
            self.drivers_holding_c_side = set()
            self.c_side_done_time = 0
            self.c_side_enabled = False

    def _disable_all_a_side_drivers(self):
        # Force-release every held A-side driver and reset the side's state.
        if self.a_side_active:
            for driver in self.drivers_holding_a_side:
                driver.disable()
            self.drivers_holding_a_side = set()
            self.a_side_done_time = 0
            self.a_side_enabled = False

    def validate_coil_section(self, driver, config):
        """Validate coil config for platform."""
        return self.platform.validate_coil_section(driver, config)
class System11Driver(DriverPlatformInterface):

    """One logical driver on the system11 overlay.

    A single physical driver may be exposed as two of these — one for the
    A side and one for the C side of the select relay.  All actions are
    funnelled through the overlay, which queues them per side.
    """

    def __init__(self, number, platform_driver: DriverPlatformInterface, overlay, side) -> None:
        """Initialize driver."""
        super().__init__(platform_driver.config, number)
        self.overlay = overlay
        self.side = side
        self.number = number
        self.platform_driver = platform_driver

    def __repr__(self):
        """Pretty print."""
        return f"System11Driver.{self.number}"

    def get_board_name(self):
        """Return name of driver board."""
        return self.platform_driver.get_board_name()

    def pulse(self, pulse_settings: PulseSettings):
        """Pulse driver."""
        self.overlay.driver_action(self.platform_driver, pulse_settings, None, self.side)

        # Usually pulse() returns the value (in ms) that the driver will pulse
        # for so we can update Driver.time_when_done. But with A/C switched
        # coils, we don't know when exactly that will be, so we return -1
        return -1

    def enable(self, pulse_settings: PulseSettings, hold_settings: HoldSettings):
        """Enable driver."""
        self.overlay.driver_action(self.platform_driver, pulse_settings, hold_settings, self.side)

    def disable(self):
        """Disable driver."""
        self.overlay.driver_action(self.platform_driver, None, None, self.side)
| 39.733753 | 120 | 0.617528 | import asyncio
from typing import Any, Optional, Set, Tuple, Dict
from mpf.core.machine import MachineController
from mpf.core.platform import DriverPlatform, DriverConfig
from mpf.platforms.interfaces.driver_platform_interface import DriverPlatformInterface, PulseSettings, HoldSettings
from mpf.core.delays import DelayManager
class System11OverlayPlatform(DriverPlatform):
__slots__ = ["delay", "platform", "system11_config", "a_side_queue", "c_side_queue",
"a_side_done_time", "c_side_done_time", "drivers_holding_a_side", "drivers_holding_c_side",
"a_side_enabled", "c_side_enabled", "ac_relay_in_transition", "prefer_a_side", "drivers"]
def __init__(self, machine: MachineController) -> None:
super().__init__(machine)
self.delay = DelayManager(machine)
self.platform = None
self.system11_config = None
self.a_side_queue = set()
self.c_side_queue = set()
self.a_side_done_time = 0
self.c_side_done_time = 0
self.drivers_holding_a_side = set()
self.drivers_holding_c_side = set()
self.a_side_enabled = True
self.c_side_enabled = False
self.drivers = {}
self.ac_relay_in_transition = False
self.prefer_a_side = True
def stop(self):
@property
def a_side_busy(self):
return self.drivers_holding_a_side or self.a_side_done_time > self.machine.clock.get_time() or self.a_side_queue
@property
def c_side_active(self):
return self.drivers_holding_c_side or self.c_side_done_time > self.machine.clock.get_time()
@property
def c_side_busy(self):
return self.drivers_holding_c_side or self.c_side_done_time > self.machine.clock.get_time() or self.c_side_queue
@property
def a_side_active(self):
return self.drivers_holding_a_side or self.a_side_done_time > self.machine.clock.get_time()
def _null_log_handler(self, *args, **kwargs):
pass
@asyncio.coroutine
def initialize(self):
# load coil platform
self.platform = self.machine.get_platform_sections(
"platform", getattr(self.machine.config.get('system11', {}), 'platform', None))
# we have to wait for coils to be initialized
self.machine.events.add_handler("init_phase_1", self._initialize)
def _initialize(self, **kwargs):
del kwargs
self._validate_config()
self.configure_logging('Platform.System11', self.system11_config['console_log'],
self.system11_config['file_log'])
self.log.debug("Configuring A/C Select Relay for driver %s",
self.system11_config['ac_relay_driver'].name)
self.system11_config['ac_relay_driver'].get_and_verify_hold_power(1.0)
self.log.debug("Configuring A/C Select Relay transition delay for "
"%sms", self.system11_config['ac_relay_delay_ms'])
self.machine.events.add_handler(self.system11_config['prefer_a_side_event'], self._prefer_a_side)
self.log.info("Configuring System11 driver to prefer A side on event %s",
self.system11_config['prefer_a_side_event'])
self.machine.events.add_handler(self.system11_config['prefer_c_side_event'], self._prefer_c_side)
self.log.info("Configuring System11 driver to prefer C side on event %s",
self.system11_config['prefer_c_side_event'])
def _prefer_a_side(self, **kwargs):
del kwargs
self.prefer_a_side = True
self._enable_a_side()
def _prefer_c_side(self, **kwargs):
del kwargs
self.prefer_a_side = False
self._enable_c_side()
def _validate_config(self):
self.system11_config = self.machine.config_validator.validate_config(
'system11', self.machine.config.get('system11', {}))
def tick(self):
if self.prefer_a_side:
if self.a_side_queue:
self._service_a_side()
elif self.c_side_queue:
self._service_c_side()
elif self.c_side_enabled and not self.c_side_active:
self._enable_a_side()
else:
if self.c_side_queue:
self._service_c_side()
elif self.a_side_queue:
self._service_a_side()
elif self.a_side_enabled and not self.a_side_active:
self._enable_c_side()
def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict):
orig_number = number
if number and (number.lower().endswith('a') or number.lower().endswith('c')):
side = number[-1:].upper()
number = number[:-1]
# only configure driver once
if number not in self.drivers:
self.drivers[number] = self.platform.configure_driver(config, number, platform_settings)
system11_driver = System11Driver(orig_number, self.drivers[number], self, side)
return system11_driver
return self.platform.configure_driver(config, number, platform_settings)
def set_pulse_on_hit_and_release_rule(self, enable_switch, coil):
if coil.hw_driver in self.drivers.values():
raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
"This is not supported.".format(coil))
self.platform.set_pulse_on_hit_and_release_rule(enable_switch, coil)
def set_pulse_on_hit_and_enable_and_release_rule(self, enable_switch, coil):
if coil.hw_driver in self.drivers.values():
raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
"This is not supported.".format(coil))
self.platform.set_pulse_on_hit_and_enable_and_release_rule(enable_switch, coil)
def set_pulse_on_hit_and_enable_and_release_and_disable_rule(self, enable_switch, disable_switch, coil):
if coil.hw_driver in self.drivers.values():
raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
"This is not supported.".format(coil))
self.platform.set_pulse_on_hit_and_enable_and_release_and_disable_rule(enable_switch, disable_switch, coil)
def set_pulse_on_hit_rule(self, enable_switch, coil):
if coil.hw_driver in self.drivers.values():
raise AssertionError("Received a request to set a hardware rule for a System11 driver {}. "
"This is not supported.".format(coil))
self.platform.set_pulse_on_hit_rule(enable_switch, coil)
def clear_hw_rule(self, switch, coil):
if coil.hw_driver in self.drivers.values():
raise AssertionError("Received a request to clear a hardware rule for a System11 driver {}. "
"This is not supported.".format(coil))
self.platform.clear_hw_rule(switch, coil)
def driver_action(self, driver, pulse_settings: Optional[PulseSettings], hold_settings: Optional[HoldSettings],
side: str):
if self.prefer_a_side:
if side == "A":
self.a_side_queue.add((driver, pulse_settings, hold_settings))
self._service_a_side()
elif side == "C":
self.c_side_queue.add((driver, pulse_settings, hold_settings))
if not self.ac_relay_in_transition and not self.a_side_busy:
self._service_c_side()
else:
raise AssertionError("Invalid side {}".format(side))
else:
if side == "C":
self.c_side_queue.add((driver, pulse_settings, hold_settings))
self._service_c_side()
elif side == "A":
self.a_side_queue.add((driver, pulse_settings, hold_settings))
if not self.ac_relay_in_transition and not self.c_side_busy:
self._service_a_side()
else:
raise AssertionError("Invalid side {}".format(side))
def _enable_ac_relay(self):
self.system11_config['ac_relay_driver'].enable()
self.ac_relay_in_transition = True
self.a_side_enabled = False
self.c_side_enabled = False
self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
callback=self._c_side_enabled,
name='enable_ac_relay')
def _disable_ac_relay(self):
self.system11_config['ac_relay_driver'].disable()
self.ac_relay_in_transition = True
self.a_side_enabled = False
self.c_side_enabled = False
self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
callback=self._a_side_enabled,
name='disable_ac_relay')
# -------------------------------- A SIDE ---------------------------------
def _enable_a_side(self):
if self.prefer_a_side:
if not self.a_side_enabled and not self.ac_relay_in_transition:
if self.c_side_active:
self._disable_all_c_side_drivers()
self._disable_ac_relay()
self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
callback=self._enable_a_side,
name='enable_a_side')
return
if self.c_side_enabled:
self._disable_ac_relay()
else:
self._a_side_enabled()
else:
if (not self.ac_relay_in_transition and
not self.a_side_enabled and
not self.c_side_busy):
self._disable_ac_relay()
elif self.a_side_enabled and self.a_side_queue:
self._service_a_side()
def _a_side_enabled(self):
self.ac_relay_in_transition = False
if self.prefer_a_side:
self.a_side_enabled = True
self.c_side_enabled = False
self._service_a_side()
else:
if self.c_side_queue:
self._enable_c_side()
return
self.c_side_enabled = False
self.a_side_enabled = True
self._service_a_side()
def _service_a_side(self):
if not self.a_side_queue:
return
if not self.a_side_enabled:
self._enable_a_side()
return
while self.a_side_queue:
driver, pulse_settings, hold_settings = self.a_side_queue.pop()
if hold_settings is None and pulse_settings:
driver.pulse(pulse_settings)
self.a_side_done_time = max(self.a_side_done_time,
self.machine.clock.get_time() + (pulse_settings.duration / 1000.0))
elif hold_settings and pulse_settings:
driver.enable(pulse_settings, hold_settings)
self.drivers_holding_a_side.add(driver)
else: # ms == 0
driver.disable()
try:
self.drivers_holding_a_side.remove(driver)
except KeyError:
pass
# -------------------------------- C SIDE ---------------------------------
def _enable_c_side(self):
if self.prefer_a_side:
if not self.c_side_enabled and not self.ac_relay_in_transition:
if self.a_side_active:
self._disable_all_a_side_drivers()
self._enable_ac_relay()
self.delay.add(ms=self.system11_config['ac_relay_delay_ms'],
callback=self._enable_c_side,
name='enable_c_side')
return
if self.a_side_enabled:
self._enable_ac_relay()
else:
self._c_side_enabled()
else:
if (not self.ac_relay_in_transition and
not self.c_side_enabled and
not self.a_side_busy):
self._enable_ac_relay()
elif self.c_side_enabled and self.c_side_queue:
self._service_c_side()
def _c_side_enabled(self):
self.ac_relay_in_transition = False
if self.prefer_a_side:
self.c_side_enabled = True
self.a_side_enabled = False
self._service_c_side()
else:
if self.a_side_queue:
self._enable_a_side()
return
self.a_side_enabled = False
self.c_side_enabled = True
self._service_c_side()
def _service_c_side(self):
if not self.c_side_queue:
return
if self.ac_relay_in_transition or self.a_side_busy:
return
if not self.c_side_enabled:
self._enable_c_side()
return
while self.c_side_queue:
driver, pulse_settings, hold_settings = self.c_side_queue.pop()
if hold_settings is None and pulse_settings:
driver.pulse(pulse_settings)
self.c_side_done_time = max(self.c_side_done_time,
self.machine.clock.get_time() + (pulse_settings.duration / 1000.0))
elif hold_settings and pulse_settings:
driver.enable(pulse_settings, hold_settings)
self.drivers_holding_c_side.add(driver)
else:
driver.disable()
try:
self.drivers_holding_c_side.remove(driver)
except KeyError:
pass
def _disable_all_c_side_drivers(self):
if self.c_side_active:
for driver in self.drivers_holding_c_side:
driver.disable()
self.drivers_holding_c_side = set()
self.c_side_done_time = 0
self.c_side_enabled = False
def _disable_all_a_side_drivers(self):
if self.a_side_active:
for driver in self.drivers_holding_a_side:
driver.disable()
self.drivers_holding_a_side = set()
self.a_side_done_time = 0
self.a_side_enabled = False
def validate_coil_section(self, driver, config):
return self.platform.validate_coil_section(driver, config)
class System11Driver(DriverPlatformInterface):
def __init__(self, number, platform_driver: DriverPlatformInterface, overlay, side) -> None:
super().__init__(platform_driver.config, number)
self.number = number
self.platform_driver = platform_driver
self.overlay = overlay
self.side = side
def __repr__(self):
return "System11Driver.{}".format(self.number)
def get_board_name(self):
return self.platform_driver.get_board_name()
def pulse(self, pulse_settings: PulseSettings):
self.overlay.driver_action(self.platform_driver, pulse_settings, None, self.side)
# Usually pulse() returns the value (in ms) that the driver will pulse
# for so we can update Driver.time_when_done. But with A/C switched
# coils, we don't know when exactly that will be, so we return -1
return -1
def enable(self, pulse_settings: PulseSettings, hold_settings: HoldSettings):
self.overlay.driver_action(self.platform_driver, pulse_settings, hold_settings, self.side)
def disable(self):
self.overlay.driver_action(self.platform_driver, None, None, self.side)
| true | true |
f7fdf4ed857b34eeab39209bf3a6cbf8a77dce20 | 527 | py | Python | leetcode/0013/Untitled-1.py | bluove/note-on-cs | 46e4ee1a2c00c0ed717af828013d42306c62061c | [
"MIT"
] | null | null | null | leetcode/0013/Untitled-1.py | bluove/note-on-cs | 46e4ee1a2c00c0ed717af828013d42306c62061c | [
"MIT"
] | null | null | null | leetcode/0013/Untitled-1.py | bluove/note-on-cs | 46e4ee1a2c00c0ed717af828013d42306c62061c | [
"MIT"
] | null | null | null | class Solution:
def romanToInt(self, s):
"""
:type s: str
:rtype: int
"""
s = s.replace('CM','DCCCC')
s = s.replace('CD','CCCC')
s = s.replace('XC','LXXXX')
s = s.replace('XL','XXXX')
s = s.replace('IX','VIIII')
s = s.replace('IV','IIII')
Roman_2_Int_dict = {'M':1000, 'D':500, 'C':100, 'L':50, 'X':10, 'V':5, 'I':1}
ret = 0
for char in s:
ret += Roman_2_Int_dict[char]
return ret
| 26.35 | 85 | 0.426945 | class Solution:
def romanToInt(self, s):
s = s.replace('CM','DCCCC')
s = s.replace('CD','CCCC')
s = s.replace('XC','LXXXX')
s = s.replace('XL','XXXX')
s = s.replace('IX','VIIII')
s = s.replace('IV','IIII')
Roman_2_Int_dict = {'M':1000, 'D':500, 'C':100, 'L':50, 'X':10, 'V':5, 'I':1}
ret = 0
for char in s:
ret += Roman_2_Int_dict[char]
return ret
| true | true |
f7fdf6002abe53d95cd09c73dfb7187f5eeadf0e | 1,366 | py | Python | recipe_parser/recipes/mykitchen101en.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
] | null | null | null | recipe_parser/recipes/mykitchen101en.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
] | null | null | null | recipe_parser/recipes/mykitchen101en.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
] | null | null | null | import re
from bs4 import BeautifulSoup
from ._schema import DefaultSchema
from ._utils import get_yields, normalize_string
class MyKitchen101en(DefaultSchema):
@classmethod
def host(cls):
return "mykitchen101en.com"
def author(self):
return self.soup.find("a", {"rel": "author"}).get_text()
def title(self):
return self.soup.find("h1", {"class": "entry-title"}).get_text()
def yields(self):
return get_yields(self.soup.find("p", text=re.compile("Yields: ")).get_text())
def image(self):
return self.schema.image()
def ingredients(self):
soup = BeautifulSoup(str(self.soup), features="html.parser")
ingredients = (
soup.find(name="p", text=re.compile("Ingredients:"))
.find_next("ul")
.find_all("li")
)
return [normalize_string(ingredient.get_text()) for ingredient in ingredients]
def instructions(self):
soup = BeautifulSoup(str(self.soup), features="html.parser")
instructions = soup.find(
name="p", text=re.compile("Directions:")
).find_all_next("p")
return "\n".join(
[
normalize_string(instruction.get_text())
for instruction in instructions
if instruction.get_text()[:1].isdigit()
]
)
| 29.06383 | 86 | 0.598097 | import re
from bs4 import BeautifulSoup
from ._schema import DefaultSchema
from ._utils import get_yields, normalize_string
class MyKitchen101en(DefaultSchema):
@classmethod
def host(cls):
return "mykitchen101en.com"
def author(self):
return self.soup.find("a", {"rel": "author"}).get_text()
def title(self):
return self.soup.find("h1", {"class": "entry-title"}).get_text()
def yields(self):
return get_yields(self.soup.find("p", text=re.compile("Yields: ")).get_text())
def image(self):
return self.schema.image()
def ingredients(self):
soup = BeautifulSoup(str(self.soup), features="html.parser")
ingredients = (
soup.find(name="p", text=re.compile("Ingredients:"))
.find_next("ul")
.find_all("li")
)
return [normalize_string(ingredient.get_text()) for ingredient in ingredients]
def instructions(self):
soup = BeautifulSoup(str(self.soup), features="html.parser")
instructions = soup.find(
name="p", text=re.compile("Directions:")
).find_all_next("p")
return "\n".join(
[
normalize_string(instruction.get_text())
for instruction in instructions
if instruction.get_text()[:1].isdigit()
]
)
| true | true |
f7fdf6170b08523e3a50200f1c862c9425a71680 | 4,854 | py | Python | src/software/backend/backend/settings/base.py | bytecod3/votingBooth | 6a248833b34885a1d6b62b5d1ee7a60baba769e9 | [
"MIT"
] | 2 | 2021-06-06T05:07:34.000Z | 2021-08-30T08:56:41.000Z | src/software/backend/backend/settings/base.py | bytecod3/votingBooth | 6a248833b34885a1d6b62b5d1ee7a60baba769e9 | [
"MIT"
] | null | null | null | src/software/backend/backend/settings/base.py | bytecod3/votingBooth | 6a248833b34885a1d6b62b5d1ee7a60baba769e9 | [
"MIT"
] | null | null | null | """
Django settings for backend project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-rf1mfkvesu-n0+kptvnw5ye-e%nqgs0s5&&j9%^5o690eui@^n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'rest_framework',
'rest_framework.authtoken',
'dj_rest_auth',
'dj_rest_auth.registration',
'django_filters',
'allauth',
'allauth.account',
'allauth.socialaccount',
'api.apps.ApiConfig',
'corsheaders'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'corsheaders.middleware.CorsMiddleware'
]
ROOT_URLCONF = 'backend.urls'
SITE_ID = 1
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = (
'allauth.account.auth_backends.AuthenticationBackend',
'django.contrib.auth.backends.ModelBackend',
)
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / "staticfiles"
STATICFILES_DIRS = (
BASE_DIR / 'static',
)
AUTH_USER_MODEL = 'api.User'
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticatedOrReadOnly',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework_simplejwt.authentication.JWTAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning'
}
REST_USE_JWT = True
REST_AUTH_REGISTER_SERIALIZERS = {
'REGISTER_SERIALIZER': 'api.serializers.UserRegisterSerializer'
}
REST_AUTH_SERIALIZERS = {
'USER_DETAILS_SERIALIZER': 'api.serializers.UserSerializer',
}
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = 'regno'
ACCOUNT_EMAIL_VERIFICATION = None
ACCOUNT_ADAPTER = 'api.adapters.UserAccountAdapter'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 26.52459 | 91 | 0.721261 |
from pathlib import Path
import os
BASE_DIR = Path(__file__).resolve().parent.parent.parent
SECRET_KEY = 'django-insecure-rf1mfkvesu-n0+kptvnw5ye-e%nqgs0s5&&j9%^5o690eui@^n'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'rest_framework',
'rest_framework.authtoken',
'dj_rest_auth',
'dj_rest_auth.registration',
'django_filters',
'allauth',
'allauth.account',
'allauth.socialaccount',
'api.apps.ApiConfig',
'corsheaders'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'corsheaders.middleware.CorsMiddleware'
]
ROOT_URLCONF = 'backend.urls'
SITE_ID = 1
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = (
'allauth.account.auth_backends.AuthenticationBackend',
'django.contrib.auth.backends.ModelBackend',
)
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / "staticfiles"
STATICFILES_DIRS = (
BASE_DIR / 'static',
)
AUTH_USER_MODEL = 'api.User'
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticatedOrReadOnly',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework_simplejwt.authentication.JWTAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning'
}
REST_USE_JWT = True
REST_AUTH_REGISTER_SERIALIZERS = {
'REGISTER_SERIALIZER': 'api.serializers.UserRegisterSerializer'
}
REST_AUTH_SERIALIZERS = {
'USER_DETAILS_SERIALIZER': 'api.serializers.UserSerializer',
}
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_AUTHENTICATION_METHOD = 'regno'
ACCOUNT_EMAIL_VERIFICATION = None
ACCOUNT_ADAPTER = 'api.adapters.UserAccountAdapter'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| true | true |
f7fdf6d5616de0b1c5066237e5e6ec52b729404b | 544 | py | Python | MicroPython_BUILD/components/micropython/esp32/modules/refresh_wd.py | linus-gates/mp | 8f55cd4e6c91d02c527ac4bc636e3b6cc7109137 | [
"Apache-2.0"
] | null | null | null | MicroPython_BUILD/components/micropython/esp32/modules/refresh_wd.py | linus-gates/mp | 8f55cd4e6c91d02c527ac4bc636e3b6cc7109137 | [
"Apache-2.0"
] | null | null | null | MicroPython_BUILD/components/micropython/esp32/modules/refresh_wd.py | linus-gates/mp | 8f55cd4e6c91d02c527ac4bc636e3b6cc7109137 | [
"Apache-2.0"
] | null | null | null | import machine
import time
import _thread
GPIO_NUM = 5
def set(num):
pin = machine.Pin(num, machine.Pin.OUT)
pin.value(1)
def reset(num):
pin = machine.Pin(num, machine.Pin.OUT)
pin.value(0)
def rfs_wd():
print("refreshing watchdog")
while (True):
set(GPIO_NUM)
time.sleep(3)
reset(GPIO_NUM)
time.sleep(3)
def start_thread(func, args = (), stackSize=4096):
_ = _thread.stack_size(stackSize)
_thread.start_new_thread("refresh wd", func, args)
start_thread(rfs_wd)
#rfs_wd() | 18.133333 | 54 | 0.648897 | import machine
import time
import _thread
GPIO_NUM = 5
def set(num):
pin = machine.Pin(num, machine.Pin.OUT)
pin.value(1)
def reset(num):
pin = machine.Pin(num, machine.Pin.OUT)
pin.value(0)
def rfs_wd():
print("refreshing watchdog")
while (True):
set(GPIO_NUM)
time.sleep(3)
reset(GPIO_NUM)
time.sleep(3)
def start_thread(func, args = (), stackSize=4096):
_ = _thread.stack_size(stackSize)
_thread.start_new_thread("refresh wd", func, args)
start_thread(rfs_wd)
| true | true |
f7fdf7ae94165cb7b080eeb00b8515a57a053e1a | 2,476 | py | Python | docs/development_guide/source/conf.py | HuakeZhBo/bl_mcu_sdk | a6a7f077e5dd535419d1741e4c2f5215eebb699d | [
"Apache-2.0"
] | 93 | 2021-04-27T07:34:49.000Z | 2022-03-22T08:43:44.000Z | docs/development_guide/source/conf.py | zeroherolin/bl_mcu_sdk | 97760e3633d7ce0f435be1d98e7c9e8c46bfaca7 | [
"Apache-2.0"
] | 18 | 2021-05-23T14:10:12.000Z | 2022-03-30T09:18:39.000Z | docs/development_guide/source/conf.py | zeroherolin/bl_mcu_sdk | 97760e3633d7ce0f435be1d98e7c9e8c46bfaca7 | [
"Apache-2.0"
] | 33 | 2021-04-27T07:46:50.000Z | 2022-02-27T05:45:19.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'BL_MCU_SDK 开发指南'
copyright = '2021, BouffaloLab Co., Ltd'
author = 'BouffaloLab MCU Team'
version = '0.3'
# The full version, including alpha/beta/rc tags
release = '0.3'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.imgmath',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
man_pages = [
(master_doc, 'bl702', 'BouffaloLab Documentation',
[author], 1)
]
| 32.155844 | 80 | 0.653069 |
project = 'BL_MCU_SDK 开发指南'
copyright = '2021, BouffaloLab Co., Ltd'
author = 'BouffaloLab MCU Team'
version = '0.3'
release = '0.3'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.imgmath',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = 'zh_CN'
exclude_patterns = []
html_theme = 'sphinx_rtd_theme'
man_pages = [
(master_doc, 'bl702', 'BouffaloLab Documentation',
[author], 1)
]
| true | true |
f7fdf95e95e2ae92fce054e58bc59bf5ecffe6aa | 9,389 | py | Python | env/lib/python3.8/site-packages/plotly/validators/scatterpolargl/marker/_symbol.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 2 | 2021-07-07T20:16:23.000Z | 2021-07-14T14:03:09.000Z | env/lib/python3.8/site-packages/plotly/validators/scatterpolargl/marker/_symbol.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 5 | 2020-06-05T20:56:21.000Z | 2021-09-22T19:12:42.000Z | env/lib/python3.8/site-packages/plotly/validators/scatterpolargl/marker/_symbol.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 2 | 2020-07-05T12:57:14.000Z | 2020-07-05T12:58:00.000Z | import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="symbol", parent_name="scatterpolargl.marker", **kwargs
):
super(SymbolValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop(
"values",
[
0,
"circle",
100,
"circle-open",
200,
"circle-dot",
300,
"circle-open-dot",
1,
"square",
101,
"square-open",
201,
"square-dot",
301,
"square-open-dot",
2,
"diamond",
102,
"diamond-open",
202,
"diamond-dot",
302,
"diamond-open-dot",
3,
"cross",
103,
"cross-open",
203,
"cross-dot",
303,
"cross-open-dot",
4,
"x",
104,
"x-open",
204,
"x-dot",
304,
"x-open-dot",
5,
"triangle-up",
105,
"triangle-up-open",
205,
"triangle-up-dot",
305,
"triangle-up-open-dot",
6,
"triangle-down",
106,
"triangle-down-open",
206,
"triangle-down-dot",
306,
"triangle-down-open-dot",
7,
"triangle-left",
107,
"triangle-left-open",
207,
"triangle-left-dot",
307,
"triangle-left-open-dot",
8,
"triangle-right",
108,
"triangle-right-open",
208,
"triangle-right-dot",
308,
"triangle-right-open-dot",
9,
"triangle-ne",
109,
"triangle-ne-open",
209,
"triangle-ne-dot",
309,
"triangle-ne-open-dot",
10,
"triangle-se",
110,
"triangle-se-open",
210,
"triangle-se-dot",
310,
"triangle-se-open-dot",
11,
"triangle-sw",
111,
"triangle-sw-open",
211,
"triangle-sw-dot",
311,
"triangle-sw-open-dot",
12,
"triangle-nw",
112,
"triangle-nw-open",
212,
"triangle-nw-dot",
312,
"triangle-nw-open-dot",
13,
"pentagon",
113,
"pentagon-open",
213,
"pentagon-dot",
313,
"pentagon-open-dot",
14,
"hexagon",
114,
"hexagon-open",
214,
"hexagon-dot",
314,
"hexagon-open-dot",
15,
"hexagon2",
115,
"hexagon2-open",
215,
"hexagon2-dot",
315,
"hexagon2-open-dot",
16,
"octagon",
116,
"octagon-open",
216,
"octagon-dot",
316,
"octagon-open-dot",
17,
"star",
117,
"star-open",
217,
"star-dot",
317,
"star-open-dot",
18,
"hexagram",
118,
"hexagram-open",
218,
"hexagram-dot",
318,
"hexagram-open-dot",
19,
"star-triangle-up",
119,
"star-triangle-up-open",
219,
"star-triangle-up-dot",
319,
"star-triangle-up-open-dot",
20,
"star-triangle-down",
120,
"star-triangle-down-open",
220,
"star-triangle-down-dot",
320,
"star-triangle-down-open-dot",
21,
"star-square",
121,
"star-square-open",
221,
"star-square-dot",
321,
"star-square-open-dot",
22,
"star-diamond",
122,
"star-diamond-open",
222,
"star-diamond-dot",
322,
"star-diamond-open-dot",
23,
"diamond-tall",
123,
"diamond-tall-open",
223,
"diamond-tall-dot",
323,
"diamond-tall-open-dot",
24,
"diamond-wide",
124,
"diamond-wide-open",
224,
"diamond-wide-dot",
324,
"diamond-wide-open-dot",
25,
"hourglass",
125,
"hourglass-open",
26,
"bowtie",
126,
"bowtie-open",
27,
"circle-cross",
127,
"circle-cross-open",
28,
"circle-x",
128,
"circle-x-open",
29,
"square-cross",
129,
"square-cross-open",
30,
"square-x",
130,
"square-x-open",
31,
"diamond-cross",
131,
"diamond-cross-open",
32,
"diamond-x",
132,
"diamond-x-open",
33,
"cross-thin",
133,
"cross-thin-open",
34,
"x-thin",
134,
"x-thin-open",
35,
"asterisk",
135,
"asterisk-open",
36,
"hash",
136,
"hash-open",
236,
"hash-dot",
336,
"hash-open-dot",
37,
"y-up",
137,
"y-up-open",
38,
"y-down",
138,
"y-down-open",
39,
"y-left",
139,
"y-left-open",
40,
"y-right",
140,
"y-right-open",
41,
"line-ew",
141,
"line-ew-open",
42,
"line-ns",
142,
"line-ns-open",
43,
"line-ne",
143,
"line-ne-open",
44,
"line-nw",
144,
"line-nw-open",
],
),
**kwargs
)
| 30.783607 | 81 | 0.251358 | import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="symbol", parent_name="scatterpolargl.marker", **kwargs
):
super(SymbolValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop(
"values",
[
0,
"circle",
100,
"circle-open",
200,
"circle-dot",
300,
"circle-open-dot",
1,
"square",
101,
"square-open",
201,
"square-dot",
301,
"square-open-dot",
2,
"diamond",
102,
"diamond-open",
202,
"diamond-dot",
302,
"diamond-open-dot",
3,
"cross",
103,
"cross-open",
203,
"cross-dot",
303,
"cross-open-dot",
4,
"x",
104,
"x-open",
204,
"x-dot",
304,
"x-open-dot",
5,
"triangle-up",
105,
"triangle-up-open",
205,
"triangle-up-dot",
305,
"triangle-up-open-dot",
6,
"triangle-down",
106,
"triangle-down-open",
206,
"triangle-down-dot",
306,
"triangle-down-open-dot",
7,
"triangle-left",
107,
"triangle-left-open",
207,
"triangle-left-dot",
307,
"triangle-left-open-dot",
8,
"triangle-right",
108,
"triangle-right-open",
208,
"triangle-right-dot",
308,
"triangle-right-open-dot",
9,
"triangle-ne",
109,
"triangle-ne-open",
209,
"triangle-ne-dot",
309,
"triangle-ne-open-dot",
10,
"triangle-se",
110,
"triangle-se-open",
210,
"triangle-se-dot",
310,
"triangle-se-open-dot",
11,
"triangle-sw",
111,
"triangle-sw-open",
211,
"triangle-sw-dot",
311,
"triangle-sw-open-dot",
12,
"triangle-nw",
112,
"triangle-nw-open",
212,
"triangle-nw-dot",
312,
"triangle-nw-open-dot",
13,
"pentagon",
113,
"pentagon-open",
213,
"pentagon-dot",
313,
"pentagon-open-dot",
14,
"hexagon",
114,
"hexagon-open",
214,
"hexagon-dot",
314,
"hexagon-open-dot",
15,
"hexagon2",
115,
"hexagon2-open",
215,
"hexagon2-dot",
315,
"hexagon2-open-dot",
16,
"octagon",
116,
"octagon-open",
216,
"octagon-dot",
316,
"octagon-open-dot",
17,
"star",
117,
"star-open",
217,
"star-dot",
317,
"star-open-dot",
18,
"hexagram",
118,
"hexagram-open",
218,
"hexagram-dot",
318,
"hexagram-open-dot",
19,
"star-triangle-up",
119,
"star-triangle-up-open",
219,
"star-triangle-up-dot",
319,
"star-triangle-up-open-dot",
20,
"star-triangle-down",
120,
"star-triangle-down-open",
220,
"star-triangle-down-dot",
320,
"star-triangle-down-open-dot",
21,
"star-square",
121,
"star-square-open",
221,
"star-square-dot",
321,
"star-square-open-dot",
22,
"star-diamond",
122,
"star-diamond-open",
222,
"star-diamond-dot",
322,
"star-diamond-open-dot",
23,
"diamond-tall",
123,
"diamond-tall-open",
223,
"diamond-tall-dot",
323,
"diamond-tall-open-dot",
24,
"diamond-wide",
124,
"diamond-wide-open",
224,
"diamond-wide-dot",
324,
"diamond-wide-open-dot",
25,
"hourglass",
125,
"hourglass-open",
26,
"bowtie",
126,
"bowtie-open",
27,
"circle-cross",
127,
"circle-cross-open",
28,
"circle-x",
128,
"circle-x-open",
29,
"square-cross",
129,
"square-cross-open",
30,
"square-x",
130,
"square-x-open",
31,
"diamond-cross",
131,
"diamond-cross-open",
32,
"diamond-x",
132,
"diamond-x-open",
33,
"cross-thin",
133,
"cross-thin-open",
34,
"x-thin",
134,
"x-thin-open",
35,
"asterisk",
135,
"asterisk-open",
36,
"hash",
136,
"hash-open",
236,
"hash-dot",
336,
"hash-open-dot",
37,
"y-up",
137,
"y-up-open",
38,
"y-down",
138,
"y-down-open",
39,
"y-left",
139,
"y-left-open",
40,
"y-right",
140,
"y-right-open",
41,
"line-ew",
141,
"line-ew-open",
42,
"line-ns",
142,
"line-ns-open",
43,
"line-ne",
143,
"line-ne-open",
44,
"line-nw",
144,
"line-nw-open",
],
),
**kwargs
)
| true | true |
f7fdf9b878edb71799759b782250ba6cf99600c6 | 2,502 | py | Python | server/walt/server/threads/main/settings_manager.py | dia38/walt-python-packages | e6fa1f166f45e73173195d57840d22bef87b88f5 | [
"BSD-3-Clause"
] | null | null | null | server/walt/server/threads/main/settings_manager.py | dia38/walt-python-packages | e6fa1f166f45e73173195d57840d22bef87b88f5 | [
"BSD-3-Clause"
] | null | null | null | server/walt/server/threads/main/settings_manager.py | dia38/walt-python-packages | e6fa1f166f45e73173195d57840d22bef87b88f5 | [
"BSD-3-Clause"
] | null | null | null | # Handlers are generators. They are created with their parameters, which are
# always the requester, the device_set and the setting_value. Then, their code
# are divided in two parts: firstly, they check all conditions to ensure the
# setting is coherent, valid, available for the devices, etc. They yield a
# boolean to indicate if the tests have succeed or not. The second part (after
# the yield) effectively configure the setting on the devices. The caller may
# ask for checks (the first part) and not for action (the second). The caller
# must not ask for the action if the checks failed (yielded False).
class SettingsHandler:
def __init__(self, server):
self.server = server
def set_device_config(self, requester, device_set, settings_args):
# Parse the settings
if len(settings_args) % 2 != 0:
requester.stderr.write(
"Provide settings as `<setting name> <setting value>` pairs.\n")
return
configurations = list(zip(settings_args[::2], settings_args[1::2]))
# ensure all settings are known and retrieve their respective handlers
handlers = []
for setting_name, setting_value in configurations:
try:
handler_name = setting_name + "_setting_handler"
handler = getattr(self, handler_name)
except AttributeError:
requester.stderr.write(
"Unknown setting '%s'.\n" % setting_name)
return
handlers.append(handler( # no code of the handler is run there, as it is a generator
requester, device_set, setting_value))
# first part: ensure all settings are ok for the devices
for handler in handlers:
# run the first part of the handler function: the sanity checks
checks_passed = next(handler)
if not checks_passed:
return
# second part: effectively configure the devices
for handler in handlers:
try:
# run the second part of the handler function: the effective action
next(handler)
# ERROR: the handler has not finished its job!
except StopIteration:
# OK, the handler has finished its job
pass
def netsetup_setting_handler(self, requester, device_set, setting_value):
return self.server.nodes.netsetup_handler(requester, device_set, setting_value)
| 45.490909 | 97 | 0.642686 |
class SettingsHandler:
def __init__(self, server):
self.server = server
def set_device_config(self, requester, device_set, settings_args):
if len(settings_args) % 2 != 0:
requester.stderr.write(
"Provide settings as `<setting name> <setting value>` pairs.\n")
return
configurations = list(zip(settings_args[::2], settings_args[1::2]))
handlers = []
for setting_name, setting_value in configurations:
try:
handler_name = setting_name + "_setting_handler"
handler = getattr(self, handler_name)
except AttributeError:
requester.stderr.write(
"Unknown setting '%s'.\n" % setting_name)
return
handlers.append(handler(
requester, device_set, setting_value))
for handler in handlers:
checks_passed = next(handler)
if not checks_passed:
return
for handler in handlers:
try:
next(handler)
except StopIteration:
pass
def netsetup_setting_handler(self, requester, device_set, setting_value):
return self.server.nodes.netsetup_handler(requester, device_set, setting_value)
| true | true |
f7fdf9ccd69e61bc51090ac810f97077cdc55228 | 41,594 | py | Python | jssh.py | fnhchaiyin/jssh | c6a3c339b3baea6cc25675739d68b855c624144e | [
"MIT"
] | null | null | null | jssh.py | fnhchaiyin/jssh | c6a3c339b3baea6cc25675739d68b855c624144e | [
"MIT"
] | null | null | null | jssh.py | fnhchaiyin/jssh | c6a3c339b3baea6cc25675739d68b855c624144e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# 报错信息为弹出框
#修改多线程
#执行进度优化
from Tkinter import *
from tkFileDialog import *
from threading import Thread,Semaphore
from datetime import datetime
import gl
from server import *
from ttk import Combobox
from tkFont import Font,NORMAL
# import atexit
from signal import signal,SIGTERM,SIGINT
import sys
import os
from cPickle import dump,load
from time import *
import platform
import re
# import tkMessageBox
def main():
reload(sys)
sys.setdefaultencoding('utf8')
def find_it(event, i):
target = "--------------------------------------%s\n" % i
where = text.search(target, '0.0', END)
if where:
pastit = where + ('+%dc' % len(target))
text.tag_add(SEL, where, pastit)
text.mark_set(INSERT, pastit)
text.see(INSERT)
text.focus()
def xshell(event,i):
if gl.server_all[i].connect_status:
shell = gl.server_all[i].ssh.invoke_shell()
def send_ctrl_c(event):
shell.send('\x03')
def single_exec_cmd(event, i):
cmd = xshell_entry.get()
if cmd:
shell.send(cmd+'\x0a')
xshell_entry.delete(0, END)
else:
shell.send('\x0a')
xshell_top=Toplevel()
xshell_top.attributes("-topmost", 1)
xshell_top.title("%s@%s"%(gl.server_all[i].username,i))
def on_closing():
shell.close()
xshell_top.destroy()
xshell_top.protocol("WM_DELETE_WINDOW", on_closing)
xshell_text = Text(xshell_top, bg='black', fg='green')
xshell_scroll = Scrollbar(xshell_top, command=xshell_text.yview)
xshell_text.configure(yscrollcommand=xshell_scroll.set)
xshell_scroll.pack(side=RIGHT, fill=Y)
xshell_text.pack(fill=BOTH,expand=YES)
xshell_Label=Label(xshell_top, text="command:")
xshell_Label.pack(side=LEFT)
xshell_entry = Entry(xshell_top, insertbackground='green', width=50)
xshell_entry.bind('<Key-Return>',lambda event,i=i:single_exec_cmd(event,i))
xshell_entry.bind('<Control-c>', send_ctrl_c)
xshell_entry.pack(fill=X)
def put_resoult():
sleep(1)
while True:
try:
xshell_text.insert(END,re.sub('\[.*?m','',shell.recv(1024)))
sleep(0.1)
xshell_text.see(END)
except:
break
Thread(target=put_resoult).start()
else:
tl = Toplevel()
tl.attributes("-topmost", 1)
tl.title("ERROR")
err_text = Label(tl, bg='black', fg='red',width=50, height=10, text="The host is not be connected!\n")
err_text.pack(fill=BOTH)
def open_list():
# 选择服务器清单
fd = askopenfilename(initialdir='.')
if fd:
save_log(log='%s open list %s\n' % (datetime.now(), fd))
root.title('Current file list:%s' % fd)
try:
server_list = open(fd)
except:
text.insert(END, "open file failed !\n")
server_list=None
if server_list:
gl.server_all.clear()
if any(gl.cbuts):
for i in gl.cbuts.keys():
gl.cbuts[i].destroy()
gl.cbuts.clear()
for (num, value) in enumerate(server_list):
if len(value) > 4 and not value.startswith('#'):
try:
hostname = value.split()[0]
except:
pass
try:
ipinfo = value.split()[1]
ip_addr = ipinfo.split(":")[0]
except:
pass
try:
if gl.server_all[hostname]:
err='ERROR,At line %s:Duplicate hostname %s\n' % (num,hostname)
text.insert(END, err)
save_log(log=err)
except:
pass
try:
if gl.server_all[hostname].ip_addr+":"+gl.server_all[hostname].port:
err='ERROR,At line %s:Duplicate ip and port %s\n' % (num,ipinfo)
text.insert(END, err)
save_log(log=err)
except:
pass
try:
try:
port = int(ipinfo.split(":")[1])
except:
port = 22
username = value.split()[2]
password = value.split()[3]
gl.server_all[hostname] = server(ip=ip_addr, port=port, username=username, password=password)
gl.server_all[hostname].selected = IntVar()
gl.cbuts[hostname] = (Checkbutton(listframe, text=hostname, font=ft, bg='black', foreground="blue", variable=gl.server_all[hostname].selected))
gl.cbuts[hostname].select()
gl.cbuts[hostname].pack()
gl.cbuts[hostname].bind("<Button-3>", lambda event, i=hostname:find_it(event, i))
gl.cbuts[hostname].bind("<Control-Button-1>", lambda event, i=hostname:xshell(event, i))
except IndexError:
err = 'ERROR,At line %s,wrong host info: %s\n' % (num + 1, value)
text.insert(END, err)
save_log(log=err)
server_list.close()
disconnect['state'] = DISABLED
if any(gl.server_all):
connect['state'] = ACTIVE
cmd_log.flush()
    def connect():
        """Connect to all selected, not-yet-connected hosts in parallel,
        polling the worker threads in a busy-wait loop that keeps the GUI
        responsive via root.update().  Note: the name `connect` is rebound
        to the connect Button later in main(), so `connect['state']`
        manipulates that button at runtime."""
        try:
            thread_num=int(thread_num_entry.get())
        except:
            thread_num=int(10)  # fall back to 10 workers on bad input
        semaphore= Semaphore(thread_num)
        def connect_do(i):
            # Worker: the semaphore caps concurrent SSH connects.
            if semaphore.acquire():
                gl.server_all[i].connect()
                semaphore.release()
        connect['state'] = DISABLED
        text.insert(END,'Connecting,Please wait ...\n')
        threads = []
        for i in gl.server_all.keys():
            if gl.server_all[i].selected.get() == 1:
                if gl.server_all[i].connect_status:
                    pass
                else:
                    # `i` is rebound from hostname to the Thread; the thread
                    # keeps the hostname as its name.
                    i = Thread(target=connect_do,args=(i,),name=i)
                    i.start()
                    threads.append(i)
                    sleep(0.02)
                    root.update()
        while True:
            for a in threads:
                sleep(0.02)
                root.update()
                if not a.isAlive():
                    sleep(0.02)
                    root.update()
                    if gl.server_all[a.getName()].err:
                        gl.cbuts[a.getName()]['foreground'] = "red"
                        # err_text intentionally does not exist the first
                        # time through: the NameError is caught below and
                        # the error window is created on demand.
                        try:
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            err_text.see(END)
                            gl.server_all[a.getName()].err = ''
                        except:
                            tl = Toplevel()
                            tl.attributes("-topmost", 1)
                            tl.title("ERROR")
                            # def closetl():
                            #     err_topped=False
                            # tl.protocol("WM_DELETE_WINDOW",closetl)
                            err_text = Text(tl, bg='black', fg='red')
                            err_scroll = Scrollbar(tl, command=err_text.yview)
                            err_text.configure(yscrollcommand=err_scroll.set)
                            err_scroll.pack(side=RIGHT, fill=Y)
                            err_text.pack(fill=BOTH,expand=YES)
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            err_text.see(END)
                            gl.server_all[a.getName()].err = ''
                        sleep(0.02)
                        root.update()
                        threads.remove(a)
                    # NOTE(review): if a host reports both err and
                    # connect_status, threads.remove(a) runs twice and the
                    # second call raises ValueError -- confirm server.connect
                    # never sets both.
                    if gl.server_all[a.getName()].connect_status:
                        gl.cbuts[a.getName()]['foreground'] = "green"
                        gl.connected = True
                        sleep(0.02)
                        root.update()
                        threads.remove(a)
            if not threads:
                text.insert(END,'Connect completed\n')
                #tkMessageBox.showinfo("Complete!", "Connect Complete!")
                break
        if gl.connected:
            disconnect['state'] = ACTIVE
            command_but['state'] = ACTIVE
            file_but['state'] = DISABLED
        connect['state'] = ACTIVE
    def disconnect():
        """Close the SSH sessions of all selected hosts in parallel, then
        recompute gl.connected and the button states.  Like `connect`, the
        name `disconnect` is rebound to a Button later in main()."""
        disconnect['state']=DISABLED
        try:
            thread_num=int(thread_num_entry.get())
        except:
            thread_num=int(10)  # fall back to 10 workers on bad input
        semaphore= Semaphore(thread_num)
        def disconnect_do(i):
            # Worker: the semaphore caps concurrent close() calls.
            if semaphore.acquire():
                gl.server_all[i].close()
                semaphore.release()
        if gl.connected:
            threads = []
            for i in gl.server_all.keys():
                if gl.server_all[i].selected.get() == 1:
                    gl.cbuts[i]['foreground'] = "blue"
                    # `i` is rebound from hostname to the worker Thread.
                    i = Thread(target=disconnect_do,args=(i,),name=i)
                    i.start()
                    sleep(0.02)
                    root.update()
                    threads.append(i)
            for a in threads:
                a.join()
        # Re-derive the global connected flag: True if any host is still up
        # (e.g. hosts that were not selected for disconnection).
        gl.connected = False
        for r in gl.server_all.keys():
            if gl.server_all[r].connect_status:
                gl.connected = True
        if gl.connected:
            disconnect['state'] = ACTIVE
            command_but['state'] = ACTIVE
            file_but['state'] = DISABLED
        else:
            disconnect['state'] = DISABLED
            connect['state'] = ACTIVE
            command_but['state'] = DISABLED
            file_but['state'] = ACTIVE
    def gexe_cmd():
        """Run the command from the entry box on every selected host in
        parallel; collect stdout into the main text box and stderr into an
        on-demand error window.  Also persists the command into the pickled
        history used by the autocomplete entry."""
        try:
            thread_num=int(thread_num_entry.get())
        except:
            thread_num=int(10)  # fall back to 10 workers on bad input
        semaphore= Semaphore(thread_num)
        def gexe_do(i,cmd):
            # Worker: the semaphore caps concurrent remote executions.
            if semaphore.acquire():
                gl.server_all[i].exec_cmd(cmd)
                semaphore.release()
        command_but['state'] = DISABLED
        gcmd = entry.get()
        save_log(log='%s exec cmd: %s\n' % (datetime.now(), gcmd))
        # Prepend the new command to the history (newest first), capped at
        # 1000 entries, then persist it for the next session.
        gl.history_cmd.reverse()
        del gl.history_cmd[1000:]
        gl.history_cmd.append(gcmd)
        gl.history_cmd.reverse()
        entry['values'] = gl.history_cmd
        history_file = open(gl.history_file, 'w')
        dump(gl.history_cmd, history_file)
        history_file.close()
        threads = []
        # "wait" window: lists hosts still running; names are removed as
        # each host finishes.
        wait_t = Toplevel()
        wait_t.attributes("-topmost", 1)
        wait_t.title("exec command:%s" % gcmd)
        w_text = Text(wait_t, bg='black', fg='green')
        w_scroll = Scrollbar(wait_t, command=w_text.yview)
        w_text.configure(yscrollcommand=w_scroll.set)
        w_scroll.pack(side=RIGHT, fill=Y)
        w_text.pack(fill=BOTH,expand=YES)
        sleep(0.02)
        clear()
        root.update()
        for i in gl.server_all.keys():
            if gl.server_all[i].selected.get() == 1:
                try:
                    w_text.insert(END,'%s\n' % i)
                except:
                    pass
                gl.cbuts[i]['foreground'] = "green"
                #a = Thread(target=gl.server_all[i].exec_cmd,kwargs={'cmd':"LANG=zh_CN.UTF-8;%s" % gcmd},name=i)
                a = Thread(target=gexe_do,kwargs={'i':i,'cmd':gcmd},name=i)
                a.start()
                sleep(0.02)
                root.update()
                threads.append(a)
        command_but['state'] = ACTIVE
        # Busy-wait until every worker finishes; root.update() keeps the
        # GUI alive while polling.
        while True:
            for a in threads:
                sleep(0.02)
                root.update()
                if not a.isAlive():
                    sleep(0.02)
                    root.update()
                    if gl.server_all[a.getName()].err:
                        gl.cbuts[a.getName()]['foreground'] = "red"
                        # Remove the finished host from the wait window.
                        try:
                            where = w_text.search('%s\n' % a.getName(), '0.0', END)
                            if where:
                                pastit = where + ('+%dc' % (len(a.getName())+1))
                                w_text.delete(where, pastit)
                        except:
                            pass
                        # err_text does not exist until the first error; the
                        # NameError is caught and the window built on demand.
                        try:
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            save_log(log=gl.server_all[a.getName()].err)
                            err_text.see(END)
                            sleep(0.02)
                            root.update()
                        except:
                            tl = Toplevel()
                            tl.attributes("-topmost", 1)
                            tl.title("ERROR:execcmd %s" % gcmd)
                            err_text = Text(tl, bg='black', fg='red')
                            err_scroll = Scrollbar(tl, command=err_text.yview)
                            err_text.configure(yscrollcommand=err_scroll.set)
                            err_scroll.pack(side=RIGHT, fill=Y)
                            err_text.pack(fill=BOTH,expand=YES)
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            save_log(log=gl.server_all[a.getName()].err)
                            err_text.see(END)
                            sleep(0.02)
                            root.update()
                    if gl.server_all[a.getName()].result:
                        try:
                            where = w_text.search('%s\n' % a.getName(), '0.0', END)
                            pastit = where + ('+%dc' % (len(a.getName())+1))
                            w_text.delete(where, pastit)
                        except:
                            pass
                        text.insert(END, "--------------------------------------%s\n" % a.getName())
                        save_log(log="--------------------------------------%s\n" % a.getName())
                        text.insert(END, gl.server_all[a.getName()].result)
                        text.see(END)
                        save_log(log=gl.server_all[a.getName()].result)
                        sleep(0.02)
                        root.update()
                    if not gl.server_all[a.getName()].result and not gl.server_all[a.getName()].err:
                        # Command produced no output at all: just clear the
                        # host from the wait window.
                        try:
                            where = w_text.search('%s\n' % a.getName(), '0.0', END)
                            if where:
                                pastit = where + ('+%dc' % (len(a.getName())+1))
                                w_text.delete(where, pastit)
                        except:
                            pass
                    sleep(0.02)
                    gl.server_all[a.getName()].err = ''
                    gl.server_all[a.getName()].result = ''
                    threads.remove(a)
            if not threads:
                break
        text.insert(END, "######################all the servers finished execcmd:%s (%s)\n" % (gcmd,datetime.now()))
        save_log(log="######################all the servers finished execcmd:%s (%s)\n" % (gcmd,datetime.now()))
        # Close the wait window automatically when every host finished.
        try:
            if w_text.get(0.0,END).split():
                pass
            else:
                wait_t.destroy()
        except:
            pass
        cmd_log.flush()
        #tkMessageBox.showinfo("Complete!", "exec cmd :\n %s \n Complete!" % gcmd)
def get_ui():
global getfile_top
getfile_top = Toplevel(root)
getfile_top.attributes("-topmost", 1)
getfile_top.title("get file")
get_remote = Label(getfile_top, text="remote file:")
get_remote.grid(row=0, column=0)
global get_re
get_re = Entry(getfile_top, insertbackground='green', width=50)
get_re.grid(row=0, column=1)
get_locate = Label(getfile_top, text="local dir:")
get_locate.grid(row=1, column=0)
global get_lo
get_lo = Entry(getfile_top, insertbackground='green', width=50)
get_lo.grid(row=1, column=1)
def get_file_select():
get_filename=askdirectory()
get_lo.delete(0, END)
get_lo.insert(END,get_filename)
get_select_but=Button(getfile_top,text='...',command=get_file_select)
get_select_but.grid(row=1,column=2)
getfile_sub_but = Button(getfile_top, text='get', command=get_file)
getfile_sub_but.grid(row=2)
    def get_file():
        """Download the remote file named in the 'get file' dialog from
        every selected host in parallel (FileSend=0 = download), reporting
        per-host status into the main text box."""
        try:
            thread_num=int(thread_num_entry.get())
        except:
            thread_num=int(10)  # fall back to 10 workers on bad input
        semaphore= Semaphore(thread_num)
        def get_do(i,lo_path,re_file,FileSend):
            # Worker: the semaphore caps concurrent transfers.
            if semaphore.acquire():
                gl.server_all[i].FileTransfer(lo_path=lo_path,re_file=re_file,FileSend=FileSend)
                semaphore.release()
        re_file=get_re.get()
        lo_file=get_lo.get()
        if re_file and lo_file:
            try:
                gl.thread_num=int(thread_num_entry.get())
            except:
                gl.thread_num=int(10)
            save_log(log='%s get file: %s\n' % (datetime.now(), re_file))
            threads = []
            # "wait" window listing hosts still transferring.
            wait_t = Toplevel()
            wait_t.attributes("-topmost", 1)
            wait_t.title("Get file:%s --> %s" % (re_file,lo_file))
            w_text = Text(wait_t, bg='black', fg='green')
            w_scroll = Scrollbar(wait_t, command=w_text.yview)
            w_text.configure(yscrollcommand=w_scroll.set)
            w_scroll.pack(side=RIGHT, fill=Y)
            w_text.pack(fill=BOTH,expand=YES)
            sleep(0.02)
            root.update()
            clear()
            getfile_top.destroy()
            for i in gl.server_all.keys():
                if gl.server_all[i].selected.get() == 1:
                    w_text.insert(END,'%s\n' % i)
                    a = Thread(target=get_do,kwargs={'i':i,'lo_path':lo_file,'re_file':re_file,'FileSend':0},name=i)
                    a.start()
                    threads.append(a)
                    sleep(0.02)
                    root.update()
            # Busy-wait for the workers; root.update() keeps the GUI alive.
            while True:
                for a in threads:
                    sleep(0.02)
                    root.update()
                    if not a.isAlive():
                        sleep(0.02)
                        root.update()
                        if gl.server_all[a.getName()].err:
                            gl.cbuts[a.getName()]['foreground'] = "red"
                            try:
                                where = w_text.search('%s\n' % a.getName(), '0.0', END)
                                if where:
                                    pastit = where + ('+%dc' % (len(a.getName())+1))
                                    w_text.delete(where, pastit)
                            except:
                                pass
                            # err_text is created on demand the first time an
                            # error occurs (NameError caught below).
                            try:
                                err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                                save_log(log="--------------------------------------%s\n" % a.getName())
                                err_text.insert(END, gl.server_all[a.getName()].err)
                                save_log(log=gl.server_all[a.getName()].err)
                                err_text.see(END)
                                gl.server_all[a.getName()].err = ''
                                sleep(0.02)
                                root.update()
                            except:
                                tl = Toplevel()
                                tl.attributes("-topmost", 1)
                                tl.title("ERROR:get file %s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
                                err_text = Text(tl, bg='black', fg='red')
                                err_scroll = Scrollbar(tl, command=err_text.yview)
                                err_text.configure(yscrollcommand=err_scroll.set)
                                err_scroll.pack(side=RIGHT, fill=Y)
                                err_text.pack(fill=BOTH,expand=YES)
                                err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                                save_log(log="--------------------------------------%s\n" % a.getName())
                                err_text.insert(END, gl.server_all[a.getName()].err)
                                save_log(log=gl.server_all[a.getName()].err)
                                err_text.see(END)
                                gl.server_all[a.getName()].err = ''
                                sleep(0.02)
                                root.update()
                            threads.remove(a)
                        elif gl.server_all[a.getName()].get_file_status:
                            try:
                                where = w_text.search('%s' % a.getName(), '0.0', END)
                                pastit = where + ('+%dc' % (len(a.getName())+1))
                                w_text.delete(where, pastit)
                            except:
                                pass
                            text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            text.insert(END, "get file %s %s\n" % (re_file, gl.server_all[a.getName()].get_file_status))
                            save_log(log="get file %s %s\n" % (re_file, gl.server_all[a.getName()].get_file_status))
                            gl.server_all[a.getName()].result = ''
                            sleep(0.02)
                            root.update()
                            threads.remove(a)
                if not threads:
                    break
            text.insert(END, "######################all the servers finished get file:%s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
            save_log(log="######################all the servers finished get file:%s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
            # Close the wait window if every host was cleared from it.
            if w_text.get(0.0, END).split():
                pass
            else:
                wait_t.destroy()
            cmd_log.flush()
            #tkMessageBox.showinfo("Complete!", "get file:\n %s \n Complete!" % re_file)
        else:
            tl = Toplevel()
            tl.attributes("-topmost", 1)
            tl.title("ERROR:get file %s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
            err_text = Label(tl, bg='black', fg='red',width=100, height=10, text="ERROR:There is no file name or path name!")
            err_text.pack(fill=BOTH)
def send_ui():
global sendfile_top
sendfile_top = Toplevel()
sendfile_top.attributes("-topmost", 1)
sendfile_top.title("send file")
send_remote = Label(sendfile_top, text="remote file:")
send_remote.grid(row=0, column=0)
global send_re
send_re = Entry(sendfile_top, insertbackground='green', width=50)
send_re.grid(row=0, column=1)
def send_file_select():
send_filename=askopenfilename()
send_lo.delete(0, END)
send_lo.insert(END,send_filename)
send_re.delete(0,END)
send_re.insert(END,"/tmp/"+os.path.split(send_filename)[-1])
send_select_but=Button(sendfile_top,text='...',command=send_file_select)
send_select_but.grid(row=1,column=2)
send_locate = Label(sendfile_top, text="local file:")
send_locate.grid(row=1, column=0)
global send_lo
send_lo = Entry(sendfile_top, insertbackground='green', width=50)
send_lo.grid(row=1, column=1)
sendfile_sub_but = Button(sendfile_top, text='send', command=send_file)
sendfile_sub_but.grid(row=2)
    def send_file():
        """Upload the local file named in the 'send file' dialog to every
        selected host in parallel (FileSend=1 = upload), reporting per-host
        status into the main text box."""
        try:
            thread_num=int(thread_num_entry.get())
        except:
            thread_num=int(10)  # fall back to 10 workers on bad input
        semaphore= Semaphore(thread_num)
        def send_do(i,lo_file,re_file,FileSend):
            # Worker: the semaphore caps concurrent transfers.
            if semaphore.acquire():
                gl.server_all[i].FileTransfer(lo_file=lo_file,re_file=re_file,FileSend=FileSend)
                semaphore.release()
        re_file=send_re.get()
        lo_file=send_lo.get()
        if re_file and lo_file:
            try:
                gl.thread_num=int(thread_num_entry.get())
            except:
                gl.thread_num=int(10)
            save_log(log='%s send file: %s --> %s \n' % (datetime.now(), lo_file, re_file))
            threads = []
            # "wait" window listing hosts still transferring.
            wait_t = Toplevel()
            wait_t.attributes("-topmost", 1)
            wait_t.title("Send file:%s --> %s" % (lo_file, re_file))
            w_text = Text(wait_t, bg='black', fg='green')
            w_scroll = Scrollbar(wait_t, command=w_text.yview)
            w_text.configure(yscrollcommand=w_scroll.set)
            w_scroll.pack(side=RIGHT, fill=Y)
            w_text.pack(fill=BOTH,expand=YES)
            sleep(0.02)
            root.update()
            clear()
            sendfile_top.destroy()
            for i in gl.server_all.keys():
                if gl.server_all[i].selected.get() == 1:
                    w_text.insert(END,'%s\n' % i)
                    a = Thread(target=send_do,kwargs={'i':i,'lo_file':lo_file,'re_file':re_file,'FileSend':1},name=i)
                    a.start()
                    threads.append(a)
                    sleep(0.02)
                    root.update()
            # Busy-wait for the workers; root.update() keeps the GUI alive.
            while True:
                for a in threads:
                    sleep(0.02)
                    root.update()
                    if not a.isAlive():
                        sleep(0.02)
                        root.update()
                        if gl.server_all[a.getName()].err:
                            gl.cbuts[a.getName()]['foreground'] = "red"
                            try:
                                where = w_text.search('%s\n' % a.getName(), '0.0', END)
                                if where:
                                    pastit = where + ('+%dc' % (len(a.getName())+1))
                                    w_text.delete(where, pastit)
                            except:
                                pass
                            # err_text is created on demand the first time an
                            # error occurs (NameError caught below).
                            try:
                                err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                                save_log(log="--------------------------------------%s\n" % a.getName())
                                err_text.insert(END, gl.server_all[a.getName()].err)
                                save_log(log=gl.server_all[a.getName()].err)
                                err_text.see(END)
                                gl.server_all[a.getName()].err = ''
                                sleep(0.02)
                                root.update()
                            except:
                                tl = Toplevel()
                                tl.attributes("-topmost", 1)
                                tl.title("ERROR:send file %s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
                                err_text = Text(tl, bg='black', fg='red')
                                err_scroll = Scrollbar(tl, command=err_text.yview)
                                err_text.configure(yscrollcommand=err_scroll.set)
                                err_scroll.pack(side=RIGHT, fill=Y)
                                err_text.pack(fill=BOTH,expand=YES)
                                err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                                save_log(log="--------------------------------------%s\n" % a.getName())
                                err_text.insert(END, gl.server_all[a.getName()].err)
                                save_log(log=gl.server_all[a.getName()].err)
                                err_text.see(END)
                                gl.server_all[a.getName()].err = ''
                                sleep(0.02)
                                root.update()
                            threads.remove(a)
                        elif gl.server_all[a.getName()].send_file_status:
                            try:
                                where = w_text.search('%s\n' % a.getName(), '0.0', END)
                                pastit = where + ('+%dc' % (len(a.getName())+1))
                                w_text.delete(where, pastit)
                            except:
                                pass
                            text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            text.insert(END, "send file %s --> %s %s\n" % (lo_file, re_file, gl.server_all[a.getName()].send_file_status))
                            save_log(log="send file %s --> %s %s\n" % (lo_file, re_file, gl.server_all[a.getName()].send_file_status))
                            gl.server_all[a.getName()].result = ''
                            sleep(0.02)
                            root.update()
                            threads.remove(a)
                if not threads:
                    break
            text.insert(END, "######################all the servers finished send file:%s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
            save_log(log="######################all the servers finished send file:%s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
            # Close the wait window if every host was cleared from it.
            if w_text.get(0.0, END).split():
                pass
            else:
                wait_t.destroy()
            cmd_log.flush()
            #tkMessageBox.showinfo("Complete!", "send file:\n %s \n Complete!" % lo_file)
        else:
            tl = Toplevel()
            tl.attributes("-topmost", 1)
            tl.title("ERROR:send file %s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
            err_text = Label(tl, bg='black', fg='red',width=100, height=10, text="ERROR:There is no file name or path name!")
            err_text.pack(fill=BOTH)
# gui
    class AutocompleteCombobox(Combobox):
        """ttk Combobox with inline autocompletion against a fixed word
        list (classic Tkinter autocomplete-entry recipe)."""
        def set_completion_list(self, completion_list):
            """Use our completion list as our drop down selection menu, arrows move through menu."""
            self._completion_list = sorted(completion_list, key=str.lower) # Work with a sorted list
            self._hits = []          # current matches for the typed prefix
            self._hit_index = 0      # which match is displayed
            self.position = 0        # caret position where typing ended
            self.bind('<KeyRelease>', self.handle_keyrelease)
            self['values'] = self._completion_list # Setup our popup menu
        def autocomplete(self, delta=0):
            # delta=0 -> fresh prefix search; nonzero -> cycle through the
            # previous hit list after trimming the old completion tail.
            if delta:
                self.delete(self.position, END)
            else:
                self.position = len(self.get())
            _hits = []
            for element in self._completion_list:
                if element.lower().startswith(self.get().lower()):
                    _hits.append(element)
            if _hits != self._hits:
                self._hit_index = 0
                self._hits = _hits
            if _hits == self._hits and self._hits:
                self._hit_index = (self._hit_index + delta) % len(self._hits)
            if self._hits:
                # Show the hit and select the auto-filled tail so further
                # typing replaces it.
                self.delete(0, END)
                self.insert(0, self._hits[self._hit_index])
                self.select_range(self.position, END)
        def handle_keyrelease(self, event):
            # if event.keysym == "BackSpace":
            #     self.delete(self.index(INSERT), END)
            #     self.position = self.index(END)
            # if event.keysym == "Left":
            #     if self.position < self.index(END):
            #         self.delete(self.position, END)
            #     else:
            #         self.position = self.position - 1
            #         self.delete(self.position, END)
            if event.keysym == "Right":
                # Accept the current completion.
                self.position = self.index(END)
            if len(event.keysym) == 1:
                # Printable character typed: recompute completions.
                self.autocomplete()
class FullScreenApp(object):
def __init__(self, master, **kwargs):
self.root = master
# self.tk.attributes('-zoomed', True) # This just maximizes it so we can see the window. It's nothing to do with fullscreen.
self.frame = Frame(self.root)
self.frame.pack()
self.state = False
self.root.bind("<F11>", self.toggle_fullscreen)
self.root.bind("<Escape>", self.end_fullscreen)
def toggle_fullscreen(self, event=None):
self.state = not self.state # Just toggling the boolean
self.root.attributes("-fullscreen", self.state)
return "break"
def end_fullscreen(self, event=None):
self.state = False
self.root.attributes("-fullscreen", False)
return "break"
    root = Tk()
    def close_all():
        """Close every live SSH session, then tear down the main window."""
        for i in gl.server_all.keys():
            if gl.server_all[i].connect_status:
                gl.server_all[i].close()
        root.destroy()
    # Run cleanup when the user closes the main window.
    root.protocol("WM_DELETE_WINDOW", close_all)
    # Global colour scheme: green-on-black for every widget.
    root.option_add('*background', 'black')
    root.option_add('*foreground', 'green')
    root.title('jssh')
    # Per-platform locations for the session log and the pickled command
    # history.
    if platform.system()=='Linux':
        jssh_home=os.environ['HOME']+"/jssh"
        try:
            os.makedirs(jssh_home)
        except:
            pass  # already exists (or not creatable; open() below will fail loudly)
        gl.logfile=jssh_home+'/log.txt'
        gl.history_file=jssh_home+'/history.data'
    elif platform.system()=='Windows':
        try:
            os.makedirs(r'c:\jssh')
        except:
            pass
        gl.logfile=r'c:\jssh\log.txt'
        gl.history_file=r'c:\jssh\history.data'
    else:
        print 'system type is not supported'
    # Seed the history file with a hand-written cPickle protocol-0 payload
    # (a list containing 'df -h' and 'ifconfig') when it does not exist.
    if os.path.isfile(gl.history_file):
        pass
    else:
        open(gl.history_file,'w').write('''(lp1
S'df -h'
p2
aS'ifconfig'
a.
''')
    #root.iconbitmap(default='jssh.ico')
    # menu bar
    def open_logfile():
        """Show the session log file in a read-only green-on-black window."""
        #os.system('notepad %s' % gl.logfile)
        tl = Toplevel()
        tl.title("Log")
        log_text = Text(tl, bg='black', fg='green')
        log_scroll = Scrollbar(tl, command=log_text.yview)
        log_text.configure(yscrollcommand=log_scroll.set)
        log_scroll.pack(side=RIGHT, fill=Y)
        log_text.pack(fill=BOTH,expand=YES)
        log=file(gl.logfile)  # Python 2 file() builtin
        for i in log:
            log_text.insert(END, i)
        log_text.see(END)
        log.close()
    def help():
        # NOTE: shadows the builtin help(); kept because the menubar entry
        # below references this name.
        help_msg = '''
    You should create server-list file frist:
    formate:hostname ip:port username password
    eg:hostname 192.168.1.10:22 root password
    use utf-8 formate better,one server one line
    Use Ctrl + left-click a server that can be manipulated separately.
    Use right-click on a server you can find it in the results.
    F11 for full screen!
    '''
        ht = Toplevel()
        ht.attributes("-topmost", 1)
        hl = Label(ht, text=help_msg, justify="left").pack()
menubar = Menu(root)
menubar.add_command(label="send file",command=send_ui)
menubar.add_command(label="get file", command=get_ui)
menubar.add_command(label="log", command=open_logfile)
menubar.add_command(label="help", command=help)
menubar.add_command(label="exit", command=close_all)
root.config(menu=menubar)
    # Command window: autocomplete entry fed from the pickled history.
    command_frame = Frame(root, bd=1, relief=SUNKEN)
    command_frame.pack(side=TOP, fill=X)
    history_file = open(gl.history_file, 'r')
    try:
        gl.history_cmd = (load(history_file))
    except:
        # Corrupt history: move it aside and re-seed with the defaults.
        # NOTE(review): gl.history_cmd keeps its previous value in this
        # branch -- presumably gl provides a default; verify in gl module.
        os.rename(gl.history_file,'%s_%s' % (gl.history_file,strftime("%Y-%m-%d_%H_%M")))
        open(gl.history_file,'w').write('''(lp1
S'df -h'
p2
aS'ifconfig'
a.
''')
    history_file.close()
    entry = AutocompleteCombobox(command_frame)
    entry.set_completion_list(gl.history_cmd)
    entry.pack(fill=X)
    # OK button: runs the entered command on all selected hosts.
    command_but = Button(command_frame, text='OK', state=DISABLED, command=gexe_cmd)
    command_but.pack(side=RIGHT)
    # Button to pick the server-list file.
    file_but = Button(command_frame, text='select server list', command=open_list)
    file_but.pack(side=LEFT)
    # Results box (command output) with scrollbar.
    text_frame = Frame(root, bd=2, relief=SUNKEN)
    text_frame.pack(side=RIGHT, fill=BOTH,expand=YES)
    text = Text(text_frame, insertbackground='green', fg='green')
    scroll = Scrollbar(text_frame, command=text.yview)
    text.configure(yscrollcommand=scroll.set)
    scroll.pack(side=RIGHT, fill=Y)
    text.pack(fill=BOTH,expand=YES)
    # Server list panel (checkbuttons are added by open_list()).
    server_frame = Frame(root, bd=2, relief=SUNKEN)
    server_frame.pack(side=LEFT, fill=Y)
def select_all():
for i in gl.cbuts.keys():
gl.cbuts[i].select()
def deselect_all():
for i in gl.cbuts.keys():
gl.cbuts[i].deselect()
def select_con():
for i in gl.cbuts.keys():
if gl.server_all[i].connect_status:
gl.cbuts[i].select()
else:
gl.cbuts[i].deselect()
def deselect_reverse():
for i in gl.cbuts.keys():
if gl.server_all[i].selected.get() == 1:
gl.cbuts[i].deselect()
else:
gl.cbuts[i].select()
    # Selection helper buttons (all / none / connected-only / invert).
    server_all_frame = Frame(server_frame, bd=2, relief=SUNKEN)
    server_all_frame.pack(side=TOP)
    Button(server_all_frame, text='all', command=select_all).grid(row=0, column=0,sticky='nesw')
    Button(server_all_frame, text='none', command=deselect_all).grid(row=0, column=1,sticky='nesw')
    Button(server_all_frame, text='just_connected', command=select_con).grid(row=1, column=0,sticky='nesw')
    Button(server_all_frame, text='reverse', command=deselect_reverse).grid(row=1, column=1,sticky='nesw')
    # Font used for the host checkbuttons created by open_list().
    ft = Font(family='Fixdsys', size=11, weight=NORMAL, underline=1)
    def listfunction(event):
        # Resize the canvas scroll region whenever the inner frame grows.
        canvas.configure(scrollregion=canvas.bbox("all"))
    # Scrollable host list: a Frame embedded in a Canvas with a Scrollbar
    # (the classic Tkinter scrollable-frame pattern).
    server_list_frame = Frame(server_frame, bd=2, relief=SUNKEN)
    server_list_frame.pack(fill=Y,expand=YES)
    canvas = Canvas(server_list_frame, width=150, height=500)
    listframe = Frame(canvas)
    myscrollbar = Scrollbar(server_list_frame, orient="vertical", command=canvas.yview)
    canvas.configure(yscrollcommand=myscrollbar.set)
    myscrollbar.pack(side="right", fill="y")
    canvas.pack(side="left", fill=Y)
    canvas.create_window((0, 0), window=listframe)
    listframe.bind("<Configure>", listfunction)
# 连接断开按键
connect = Button(command_frame, text='connect', state=DISABLED, command=connect)
connect.pack(side=LEFT)
disconnect = Button(command_frame, text='disconnect', state=DISABLED, command=disconnect)
disconnect.pack(side=LEFT)
#线程数量限制
thread_num_label = Label(command_frame,text=' Max Threads: ')
thread_num_label.pack(side=LEFT)
thread_num_e = StringVar()
thread_num_entry = Entry(command_frame,textvariable=thread_num_e,width=5,insertbackground = 'green')
thread_num_e.set('10')
thread_num_entry.pack(side=LEFT)
    # Right-click context menu on the results box (save / clear).
    def save():
        """Dump the entire results box to a user-chosen file."""
        save_file = asksaveasfilename(initialdir='.')
        if save_file:
            open(save_file, 'w').write(text.get(0.0, END))
    def clear():
        """Empty the results box (also used by the command/transfer callbacks)."""
        text.delete('0.0', END)
    # NOTE: rebinds `menubar` to the context menu; the top menu bar was
    # already attached to root above, so that reference stays valid.
    menubar = Menu(root)
    menubar.add_command(label='save', command=save)
    menubar.add_command(label='clear', command=clear)
    def popup(event): # show the context menu at the mouse position
        menubar.post(event.x_root, event.y_root)
    text.bind('<Button-3>', popup)
cmd_log = open(gl.logfile, 'a')
def save_log(log=''):
cmd_log.write(log)
def the_end():
# cmd_log.close()
print 'the end'
signal(SIGTERM, the_end)
signal(SIGINT, the_end)
root.mainloop()
if __name__=='__main__':
    # Script entry point: build the GUI and enter the Tk event loop.
    main()
| 45.657519 | 171 | 0.47014 |
from Tkinter import *
from tkFileDialog import *
from threading import Thread,Semaphore
from datetime import datetime
import gl
from server import *
from ttk import Combobox
from tkFont import Font,NORMAL
from signal import signal,SIGTERM,SIGINT
import sys
import os
from cPickle import dump,load
from time import *
import platform
import re
def main():
reload(sys)
sys.setdefaultencoding('utf8')
def find_it(event, i):
target = "--------------------------------------%s\n" % i
where = text.search(target, '0.0', END)
if where:
pastit = where + ('+%dc' % len(target))
text.tag_add(SEL, where, pastit)
text.mark_set(INSERT, pastit)
text.see(INSERT)
text.focus()
def xshell(event,i):
if gl.server_all[i].connect_status:
shell = gl.server_all[i].ssh.invoke_shell()
def send_ctrl_c(event):
shell.send('\x03')
def single_exec_cmd(event, i):
cmd = xshell_entry.get()
if cmd:
shell.send(cmd+'\x0a')
xshell_entry.delete(0, END)
else:
shell.send('\x0a')
xshell_top=Toplevel()
xshell_top.attributes("-topmost", 1)
xshell_top.title("%s@%s"%(gl.server_all[i].username,i))
def on_closing():
shell.close()
xshell_top.destroy()
xshell_top.protocol("WM_DELETE_WINDOW", on_closing)
xshell_text = Text(xshell_top, bg='black', fg='green')
xshell_scroll = Scrollbar(xshell_top, command=xshell_text.yview)
xshell_text.configure(yscrollcommand=xshell_scroll.set)
xshell_scroll.pack(side=RIGHT, fill=Y)
xshell_text.pack(fill=BOTH,expand=YES)
xshell_Label=Label(xshell_top, text="command:")
xshell_Label.pack(side=LEFT)
xshell_entry = Entry(xshell_top, insertbackground='green', width=50)
xshell_entry.bind('<Key-Return>',lambda event,i=i:single_exec_cmd(event,i))
xshell_entry.bind('<Control-c>', send_ctrl_c)
xshell_entry.pack(fill=X)
def put_resoult():
sleep(1)
while True:
try:
xshell_text.insert(END,re.sub('\[.*?m','',shell.recv(1024)))
sleep(0.1)
xshell_text.see(END)
except:
break
Thread(target=put_resoult).start()
else:
tl = Toplevel()
tl.attributes("-topmost", 1)
tl.title("ERROR")
err_text = Label(tl, bg='black', fg='red',width=50, height=10, text="The host is not be connected!\n")
err_text.pack(fill=BOTH)
def open_list():
fd = askopenfilename(initialdir='.')
if fd:
save_log(log='%s open list %s\n' % (datetime.now(), fd))
root.title('Current file list:%s' % fd)
try:
server_list = open(fd)
except:
text.insert(END, "open file failed !\n")
server_list=None
if server_list:
gl.server_all.clear()
if any(gl.cbuts):
for i in gl.cbuts.keys():
gl.cbuts[i].destroy()
gl.cbuts.clear()
for (num, value) in enumerate(server_list):
if len(value) > 4 and not value.startswith('#'):
try:
hostname = value.split()[0]
except:
pass
try:
ipinfo = value.split()[1]
ip_addr = ipinfo.split(":")[0]
except:
pass
try:
if gl.server_all[hostname]:
err='ERROR,At line %s:Duplicate hostname %s\n' % (num,hostname)
text.insert(END, err)
save_log(log=err)
except:
pass
try:
if gl.server_all[hostname].ip_addr+":"+gl.server_all[hostname].port:
err='ERROR,At line %s:Duplicate ip and port %s\n' % (num,ipinfo)
text.insert(END, err)
save_log(log=err)
except:
pass
try:
try:
port = int(ipinfo.split(":")[1])
except:
port = 22
username = value.split()[2]
password = value.split()[3]
gl.server_all[hostname] = server(ip=ip_addr, port=port, username=username, password=password)
gl.server_all[hostname].selected = IntVar()
gl.cbuts[hostname] = (Checkbutton(listframe, text=hostname, font=ft, bg='black', foreground="blue", variable=gl.server_all[hostname].selected))
gl.cbuts[hostname].select()
gl.cbuts[hostname].pack()
gl.cbuts[hostname].bind("<Button-3>", lambda event, i=hostname:find_it(event, i))
gl.cbuts[hostname].bind("<Control-Button-1>", lambda event, i=hostname:xshell(event, i))
except IndexError:
err = 'ERROR,At line %s,wrong host info: %s\n' % (num + 1, value)
text.insert(END, err)
save_log(log=err)
server_list.close()
disconnect['state'] = DISABLED
if any(gl.server_all):
connect['state'] = ACTIVE
cmd_log.flush()
def connect():
try:
thread_num=int(thread_num_entry.get())
except:
thread_num=int(10)
semaphore= Semaphore(thread_num)
def connect_do(i):
if semaphore.acquire():
gl.server_all[i].connect()
semaphore.release()
connect['state'] = DISABLED
text.insert(END,'Connecting,Please wait ...\n')
threads = []
for i in gl.server_all.keys():
if gl.server_all[i].selected.get() == 1:
if gl.server_all[i].connect_status:
pass
else:
i = Thread(target=connect_do,args=(i,),name=i)
i.start()
threads.append(i)
sleep(0.02)
root.update()
while True:
for a in threads:
sleep(0.02)
root.update()
if not a.isAlive():
sleep(0.02)
root.update()
if gl.server_all[a.getName()].err:
gl.cbuts[a.getName()]['foreground'] = "red"
try:
err_text.insert(END, "--------------------------------------%s\n" % a.getName())
err_text.insert(END, gl.server_all[a.getName()].err)
err_text.see(END)
gl.server_all[a.getName()].err = ''
except:
tl = Toplevel()
tl.attributes("-topmost", 1)
tl.title("ERROR")
err_text = Text(tl, bg='black', fg='red')
err_scroll = Scrollbar(tl, command=err_text.yview)
err_text.configure(yscrollcommand=err_scroll.set)
err_scroll.pack(side=RIGHT, fill=Y)
err_text.pack(fill=BOTH,expand=YES)
err_text.insert(END, "--------------------------------------%s\n" % a.getName())
err_text.insert(END, gl.server_all[a.getName()].err)
err_text.see(END)
gl.server_all[a.getName()].err = ''
sleep(0.02)
root.update()
threads.remove(a)
if gl.server_all[a.getName()].connect_status:
gl.cbuts[a.getName()]['foreground'] = "green"
gl.connected = True
sleep(0.02)
root.update()
threads.remove(a)
if not threads:
text.insert(END,'Connect completed\n')
break
if gl.connected:
disconnect['state'] = ACTIVE
command_but['state'] = ACTIVE
file_but['state'] = DISABLED
connect['state'] = ACTIVE
def disconnect():
disconnect['state']=DISABLED
try:
thread_num=int(thread_num_entry.get())
except:
thread_num=int(10)
semaphore= Semaphore(thread_num)
def disconnect_do(i):
if semaphore.acquire():
gl.server_all[i].close()
semaphore.release()
if gl.connected:
threads = []
for i in gl.server_all.keys():
if gl.server_all[i].selected.get() == 1:
gl.cbuts[i]['foreground'] = "blue"
i = Thread(target=disconnect_do,args=(i,),name=i)
i.start()
sleep(0.02)
root.update()
threads.append(i)
for a in threads:
a.join()
gl.connected = False
for r in gl.server_all.keys():
if gl.server_all[r].connect_status:
gl.connected = True
if gl.connected:
disconnect['state'] = ACTIVE
command_but['state'] = ACTIVE
file_but['state'] = DISABLED
else:
disconnect['state'] = DISABLED
connect['state'] = ACTIVE
command_but['state'] = DISABLED
file_but['state'] = ACTIVE
def gexe_cmd():
    """Run the command typed in the entry box on every selected server.

    Workers are throttled by a Semaphore sized from the "Max Threads" entry.
    A topmost "pending" window (``wait_t``) lists the servers still running;
    each server name is deleted from it as its thread finishes.  Errors are
    appended to a shared error window, results to the main text box, and
    everything is mirrored to the session log via ``save_log``.
    """
    try:
        thread_num=int(thread_num_entry.get())
    except:
        thread_num=int(10)
    semaphore= Semaphore(thread_num)
    def gexe_do(i,cmd):
        # worker: run the command on one server, throttled by the semaphore
        if semaphore.acquire():
            gl.server_all[i].exec_cmd(cmd)
            semaphore.release()
    command_but['state'] = DISABLED
    gcmd = entry.get()
    save_log(log='%s exec cmd: %s\n' % (datetime.now(), gcmd))
    # push the command onto the front of the (max 1000 entry) history list
    # and persist the history with pickle
    gl.history_cmd.reverse()
    del gl.history_cmd[1000:]
    gl.history_cmd.append(gcmd)
    gl.history_cmd.reverse()
    entry['values'] = gl.history_cmd
    history_file = open(gl.history_file, 'w')
    dump(gl.history_cmd, history_file)
    history_file.close()
    threads = []
    # "pending" window listing servers that have not finished yet
    wait_t = Toplevel()
    wait_t.attributes("-topmost", 1)
    wait_t.title("exec command:%s" % gcmd)
    w_text = Text(wait_t, bg='black', fg='green')
    w_scroll = Scrollbar(wait_t, command=w_text.yview)
    w_text.configure(yscrollcommand=w_scroll.set)
    w_scroll.pack(side=RIGHT, fill=Y)
    w_text.pack(fill=BOTH,expand=YES)
    sleep(0.02)
    clear()
    root.update()
    # spawn one worker thread per selected server
    for i in gl.server_all.keys():
        if gl.server_all[i].selected.get() == 1:
            try:
                w_text.insert(END,'%s\n' % i)
            except:
                pass
            gl.cbuts[i]['foreground'] = "green"
            a = Thread(target=gexe_do,kwargs={'i':i,'cmd':gcmd},name=i)
            a.start()
            sleep(0.02)
            root.update()
            threads.append(a)
    command_but['state'] = ACTIVE
    # poll the workers, harvesting output/errors as each one finishes
    while True:
        for a in threads:
            sleep(0.02)
            root.update()
            if not a.isAlive():
                sleep(0.02)
                root.update()
                if gl.server_all[a.getName()].err:
                    gl.cbuts[a.getName()]['foreground'] = "red"
                    # remove the finished server from the pending window
                    try:
                        where = w_text.search('%s\n' % a.getName(), '0.0', END)
                        if where:
                            pastit = where + ('+%dc' % (len(a.getName())+1))
                            w_text.delete(where, pastit)
                    except:
                        pass
                    # ``err_text`` is an unbound local until the except branch
                    # below creates it, so the first error raises and builds
                    # the shared error window; later errors reuse it.
                    try:
                        err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                        save_log(log="--------------------------------------%s\n" % a.getName())
                        err_text.insert(END, gl.server_all[a.getName()].err)
                        save_log(log=gl.server_all[a.getName()].err)
                        err_text.see(END)
                        sleep(0.02)
                        root.update()
                    except:
                        tl = Toplevel()
                        tl.attributes("-topmost", 1)
                        tl.title("ERROR:execcmd %s" % gcmd)
                        err_text = Text(tl, bg='black', fg='red')
                        err_scroll = Scrollbar(tl, command=err_text.yview)
                        err_text.configure(yscrollcommand=err_scroll.set)
                        err_scroll.pack(side=RIGHT, fill=Y)
                        err_text.pack(fill=BOTH,expand=YES)
                        err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                        save_log(log="--------------------------------------%s\n" % a.getName())
                        err_text.insert(END, gl.server_all[a.getName()].err)
                        save_log(log=gl.server_all[a.getName()].err)
                        err_text.see(END)
                        sleep(0.02)
                        root.update()
                if gl.server_all[a.getName()].result:
                    try:
                        where = w_text.search('%s\n' % a.getName(), '0.0', END)
                        pastit = where + ('+%dc' % (len(a.getName())+1))
                        w_text.delete(where, pastit)
                    except:
                        pass
                    text.insert(END, "--------------------------------------%s\n" % a.getName())
                    save_log(log="--------------------------------------%s\n" % a.getName())
                    text.insert(END, gl.server_all[a.getName()].result)
                    text.see(END)
                    save_log(log=gl.server_all[a.getName()].result)
                    sleep(0.02)
                    root.update()
                # no output at all: just clear the pending entry
                if not gl.server_all[a.getName()].result and not gl.server_all[a.getName()].err:
                    try:
                        where = w_text.search('%s\n' % a.getName(), '0.0', END)
                        if where:
                            pastit = where + ('+%dc' % (len(a.getName())+1))
                            w_text.delete(where, pastit)
                    except:
                        pass
                    sleep(0.02)
                # reset per-server state and drop the finished thread
                gl.server_all[a.getName()].err = ''
                gl.server_all[a.getName()].result = ''
                threads.remove(a)
        if not threads:
            break
    text.insert(END, "######################all the servers finished execcmd:%s (%s)\n" % (gcmd,datetime.now()))
    save_log(log="######################all the servers finished execcmd:%s (%s)\n" % (gcmd,datetime.now()))
    # keep the pending window open only if server names remain in it
    try:
        if w_text.get(0.0,END).split():
            pass
        else:
            wait_t.destroy()
    except:
        pass
    cmd_log.flush()
def get_ui():
    """Build the "get file" dialog (remote file path + local target directory).

    The entries are published as module globals (``get_re``, ``get_lo``,
    ``getfile_top``) because ``get_file`` reads them when the 'get' button
    is pressed.
    """
    global getfile_top
    getfile_top = Toplevel(root)
    getfile_top.attributes("-topmost", 1)
    getfile_top.title("get file")
    get_remote = Label(getfile_top, text="remote file:")
    get_remote.grid(row=0, column=0)
    global get_re
    get_re = Entry(getfile_top, insertbackground='green', width=50)
    get_re.grid(row=0, column=1)
    get_locate = Label(getfile_top, text="local dir:")
    get_locate.grid(row=1, column=0)
    global get_lo
    get_lo = Entry(getfile_top, insertbackground='green', width=50)
    get_lo.grid(row=1, column=1)
    def get_file_select():
        # '...' button: pick the local destination directory via a dialog
        get_filename=askdirectory()
        get_lo.delete(0, END)
        get_lo.insert(END,get_filename)
    get_select_but=Button(getfile_top,text='...',command=get_file_select)
    get_select_but.grid(row=1,column=2)
    getfile_sub_but = Button(getfile_top, text='get', command=get_file)
    getfile_sub_but.grid(row=2)
def get_file():
    """Fetch a remote file from every selected server into the local directory.

    Reads the remote path / local dir from the ``get_ui`` entries, spawns one
    throttled worker per selected server (``FileTransfer`` with FileSend=0),
    shows a "pending" window of servers still transferring, and reports
    per-server status or errors to the main text box / error window and log.
    """
    try:
        thread_num=int(thread_num_entry.get())
    except:
        thread_num=int(10)
    semaphore= Semaphore(thread_num)
    def get_do(i,lo_path,re_file,FileSend):
        # worker: download from one server, throttled by the semaphore
        if semaphore.acquire():
            gl.server_all[i].FileTransfer(lo_path=lo_path,re_file=re_file,FileSend=FileSend)
            semaphore.release()
    re_file=get_re.get()
    lo_file=get_lo.get()
    if re_file and lo_file:
        try:
            gl.thread_num=int(thread_num_entry.get())
        except:
            gl.thread_num=int(10)
        save_log(log='%s get file: %s\n' % (datetime.now(), re_file))
        threads = []
        # "pending" window listing servers that have not finished yet
        wait_t = Toplevel()
        wait_t.attributes("-topmost", 1)
        wait_t.title("Get file:%s --> %s" % (re_file,lo_file))
        w_text = Text(wait_t, bg='black', fg='green')
        w_scroll = Scrollbar(wait_t, command=w_text.yview)
        w_text.configure(yscrollcommand=w_scroll.set)
        w_scroll.pack(side=RIGHT, fill=Y)
        w_text.pack(fill=BOTH,expand=YES)
        sleep(0.02)
        root.update()
        clear()
        getfile_top.destroy()
        # spawn one worker thread per selected server
        for i in gl.server_all.keys():
            if gl.server_all[i].selected.get() == 1:
                w_text.insert(END,'%s\n' % i)
                a = Thread(target=get_do,kwargs={'i':i,'lo_path':lo_file,'re_file':re_file,'FileSend':0},name=i)
                a.start()
                threads.append(a)
                sleep(0.02)
                root.update()
        # poll workers; harvest errors/status as each transfer finishes
        while True:
            for a in threads:
                sleep(0.02)
                root.update()
                if not a.isAlive():
                    sleep(0.02)
                    root.update()
                    if gl.server_all[a.getName()].err:
                        gl.cbuts[a.getName()]['foreground'] = "red"
                        # remove the finished server from the pending window
                        try:
                            where = w_text.search('%s\n' % a.getName(), '0.0', END)
                            if where:
                                pastit = where + ('+%dc' % (len(a.getName())+1))
                                w_text.delete(where, pastit)
                        except:
                            pass
                        # ``err_text`` is unbound until the except branch
                        # creates it (first error); later errors reuse it.
                        try:
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            save_log(log=gl.server_all[a.getName()].err)
                            err_text.see(END)
                            gl.server_all[a.getName()].err = ''
                            sleep(0.02)
                            root.update()
                        except:
                            tl = Toplevel()
                            tl.attributes("-topmost", 1)
                            tl.title("ERROR:get file %s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
                            err_text = Text(tl, bg='black', fg='red')
                            err_scroll = Scrollbar(tl, command=err_text.yview)
                            err_text.configure(yscrollcommand=err_scroll.set)
                            err_scroll.pack(side=RIGHT, fill=Y)
                            err_text.pack(fill=BOTH,expand=YES)
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            save_log(log=gl.server_all[a.getName()].err)
                            err_text.see(END)
                            gl.server_all[a.getName()].err = ''
                            sleep(0.02)
                            root.update()
                        threads.remove(a)
                    elif gl.server_all[a.getName()].get_file_status:
                        # success: clear pending entry and report the status
                        try:
                            where = w_text.search('%s' % a.getName(), '0.0', END)
                            pastit = where + ('+%dc' % (len(a.getName())+1))
                            w_text.delete(where, pastit)
                        except:
                            pass
                        text.insert(END, "--------------------------------------%s\n" % a.getName())
                        save_log(log="--------------------------------------%s\n" % a.getName())
                        text.insert(END, "get file %s %s\n" % (re_file, gl.server_all[a.getName()].get_file_status))
                        save_log(log="get file %s %s\n" % (re_file, gl.server_all[a.getName()].get_file_status))
                        gl.server_all[a.getName()].result = ''
                        sleep(0.02)
                        root.update()
                        threads.remove(a)
            if not threads:
                break
        text.insert(END, "######################all the servers finished get file:%s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
        save_log(log="######################all the servers finished get file:%s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
        # keep the pending window open only if server names remain in it
        if w_text.get(0.0, END).split():
            pass
        else:
            wait_t.destroy()
        cmd_log.flush()
    else:
        # missing remote path or local dir: show an error popup instead
        tl = Toplevel()
        tl.attributes("-topmost", 1)
        tl.title("ERROR:get file %s --> %s (%s)\n" % (re_file,lo_file,datetime.now()))
        err_text = Label(tl, bg='black', fg='red',width=100, height=10, text="ERROR:There is no file name or path name!")
        err_text.pack(fill=BOTH)
def send_ui():
    """Build the "send file" dialog (local source file + remote target path).

    The entries are published as module globals (``send_re``, ``send_lo``,
    ``sendfile_top``) because ``send_file`` reads them when the 'send'
    button is pressed.
    """
    global sendfile_top
    sendfile_top = Toplevel()
    sendfile_top.attributes("-topmost", 1)
    sendfile_top.title("send file")
    send_remote = Label(sendfile_top, text="remote file:")
    send_remote.grid(row=0, column=0)
    global send_re
    send_re = Entry(sendfile_top, insertbackground='green', width=50)
    send_re.grid(row=0, column=1)
    def send_file_select():
        # '...' button: pick the local file and pre-fill the remote path
        # as /tmp/<basename>; ``send_lo`` exists by the time this can fire
        send_filename=askopenfilename()
        send_lo.delete(0, END)
        send_lo.insert(END,send_filename)
        send_re.delete(0,END)
        send_re.insert(END,"/tmp/"+os.path.split(send_filename)[-1])
    send_select_but=Button(sendfile_top,text='...',command=send_file_select)
    send_select_but.grid(row=1,column=2)
    send_locate = Label(sendfile_top, text="local file:")
    send_locate.grid(row=1, column=0)
    global send_lo
    send_lo = Entry(sendfile_top, insertbackground='green', width=50)
    send_lo.grid(row=1, column=1)
    sendfile_sub_but = Button(sendfile_top, text='send', command=send_file)
    sendfile_sub_but.grid(row=2)
def send_file():
    """Upload a local file to every selected server.

    Reads the local file / remote path from the ``send_ui`` entries, spawns
    one throttled worker per selected server (``FileTransfer`` with
    FileSend=1), shows a "pending" window of servers still transferring, and
    reports per-server status or errors to the text box / error window / log.
    """
    try:
        thread_num=int(thread_num_entry.get())
    except:
        thread_num=int(10)
    semaphore= Semaphore(thread_num)
    def send_do(i,lo_file,re_file,FileSend):
        # worker: upload to one server, throttled by the semaphore
        if semaphore.acquire():
            gl.server_all[i].FileTransfer(lo_file=lo_file,re_file=re_file,FileSend=FileSend)
            semaphore.release()
    re_file=send_re.get()
    lo_file=send_lo.get()
    if re_file and lo_file:
        try:
            gl.thread_num=int(thread_num_entry.get())
        except:
            gl.thread_num=int(10)
        save_log(log='%s send file: %s --> %s \n' % (datetime.now(), lo_file, re_file))
        threads = []
        # "pending" window listing servers that have not finished yet
        wait_t = Toplevel()
        wait_t.attributes("-topmost", 1)
        wait_t.title("Send file:%s --> %s" % (lo_file, re_file))
        w_text = Text(wait_t, bg='black', fg='green')
        w_scroll = Scrollbar(wait_t, command=w_text.yview)
        w_text.configure(yscrollcommand=w_scroll.set)
        w_scroll.pack(side=RIGHT, fill=Y)
        w_text.pack(fill=BOTH,expand=YES)
        sleep(0.02)
        root.update()
        clear()
        sendfile_top.destroy()
        # spawn one worker thread per selected server
        for i in gl.server_all.keys():
            if gl.server_all[i].selected.get() == 1:
                w_text.insert(END,'%s\n' % i)
                a = Thread(target=send_do,kwargs={'i':i,'lo_file':lo_file,'re_file':re_file,'FileSend':1},name=i)
                a.start()
                threads.append(a)
                sleep(0.02)
                root.update()
        # poll workers; harvest errors/status as each transfer finishes
        while True:
            for a in threads:
                sleep(0.02)
                root.update()
                if not a.isAlive():
                    sleep(0.02)
                    root.update()
                    if gl.server_all[a.getName()].err:
                        gl.cbuts[a.getName()]['foreground'] = "red"
                        # remove the finished server from the pending window
                        try:
                            where = w_text.search('%s\n' % a.getName(), '0.0', END)
                            if where:
                                pastit = where + ('+%dc' % (len(a.getName())+1))
                                w_text.delete(where, pastit)
                        except:
                            pass
                        # ``err_text`` is unbound until the except branch
                        # creates it (first error); later errors reuse it.
                        try:
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            save_log(log=gl.server_all[a.getName()].err)
                            err_text.see(END)
                            gl.server_all[a.getName()].err = ''
                            sleep(0.02)
                            root.update()
                        except:
                            tl = Toplevel()
                            tl.attributes("-topmost", 1)
                            tl.title("ERROR:send file %s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
                            err_text = Text(tl, bg='black', fg='red')
                            err_scroll = Scrollbar(tl, command=err_text.yview)
                            err_text.configure(yscrollcommand=err_scroll.set)
                            err_scroll.pack(side=RIGHT, fill=Y)
                            err_text.pack(fill=BOTH,expand=YES)
                            err_text.insert(END, "--------------------------------------%s\n" % a.getName())
                            save_log(log="--------------------------------------%s\n" % a.getName())
                            err_text.insert(END, gl.server_all[a.getName()].err)
                            save_log(log=gl.server_all[a.getName()].err)
                            err_text.see(END)
                            gl.server_all[a.getName()].err = ''
                            sleep(0.02)
                            root.update()
                        threads.remove(a)
                    elif gl.server_all[a.getName()].send_file_status:
                        # success: clear pending entry and report the status
                        try:
                            where = w_text.search('%s\n' % a.getName(), '0.0', END)
                            pastit = where + ('+%dc' % (len(a.getName())+1))
                            w_text.delete(where, pastit)
                        except:
                            pass
                        text.insert(END, "--------------------------------------%s\n" % a.getName())
                        save_log(log="--------------------------------------%s\n" % a.getName())
                        text.insert(END, "send file %s --> %s %s\n" % (lo_file, re_file, gl.server_all[a.getName()].send_file_status))
                        save_log(log="send file %s --> %s %s\n" % (lo_file, re_file, gl.server_all[a.getName()].send_file_status))
                        gl.server_all[a.getName()].result = ''
                        sleep(0.02)
                        root.update()
                        threads.remove(a)
            if not threads:
                break
        text.insert(END, "######################all the servers finished send file:%s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
        save_log(log="######################all the servers finished send file:%s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
        # keep the pending window open only if server names remain in it
        if w_text.get(0.0, END).split():
            pass
        else:
            wait_t.destroy()
        cmd_log.flush()
    else:
        # missing local file or remote path: show an error popup instead
        tl = Toplevel()
        tl.attributes("-topmost", 1)
        tl.title("ERROR:send file %s --> %s (%s)\n" % (lo_file,re_file,datetime.now()))
        err_text = Label(tl, bg='black', fg='red',width=100, height=10, text="ERROR:There is no file name or path name!")
        err_text.pack(fill=BOTH)
class AutocompleteCombobox(Combobox):
    """A ttk Combobox that autocompletes against its value list as you type."""
    def set_completion_list(self, completion_list):
        """Use our completion list as our drop down selection menu, arrows move through menu."""
        self._completion_list = sorted(completion_list, key=str.lower)  # case-insensitive sort
        self._hits = []          # entries matching the current prefix
        self._hit_index = 0      # which of those entries is displayed
        self.position = 0        # caret column where the completion begins
        self.bind('<KeyRelease>', self.handle_keyrelease)
        self['values'] = self._completion_list
    def autocomplete(self, delta=0):
        """Complete the typed prefix; a non-zero delta cycles through hits."""
        if delta:
            # cycling: strip the previously auto-inserted tail first
            self.delete(self.position, END)
        else:
            self.position = len(self.get())
        # collect all entries starting with the prefix (case-insensitive)
        _hits = []
        for element in self._completion_list:
            if element.lower().startswith(self.get().lower()):
                _hits.append(element)
        if _hits != self._hits:
            self._hit_index = 0
            self._hits = _hits
        if _hits == self._hits and self._hits:
            self._hit_index = (self._hit_index + delta) % len(self._hits)
        if self._hits:
            # show the hit; select the auto-inserted tail so typing replaces it
            self.delete(0, END)
            self.insert(0, self._hits[self._hit_index])
            self.select_range(self.position, END)
    def handle_keyrelease(self, event):
        """Trigger autocompletion on ordinary (single-character) keys."""
        if event.keysym == "Right":
            self.position = self.index(END)
        if len(event.keysym) == 1:
            self.autocomplete()
        # NOTE(review): BackSpace/Delete/Left are not handled, so deleting
        # characters does not shrink self.position — confirm this is intended.
class FullScreenApp(object):
    """Adds fullscreen toggling to a Tk window: F11 toggles, Escape exits."""
    def __init__(self, master, **kwargs):
        self.root = master
        # BUG FIX: was `elf.state = False` (typo), so `self.state` never
        # existed and the first <F11> press raised AttributeError in
        # toggle_fullscreen.
        self.state = False
        self.root.bind("<F11>", self.toggle_fullscreen)
        self.root.bind("<Escape>", self.end_fullscreen)
    def toggle_fullscreen(self, event=None):
        """Flip fullscreen on/off; return "break" to stop event propagation."""
        self.state = not self.state  # Just toggling the boolean
        self.root.attributes("-fullscreen", self.state)
        return "break"
    def end_fullscreen(self, event=None):
        """Always leave fullscreen (bound to Escape)."""
        self.state = False
        self.root.attributes("-fullscreen", False)
        return "break"
root = Tk()
def close_all():
    """Close every still-connected server session, then destroy the window."""
    for server in gl.server_all.values():
        if server.connect_status:
            server.close()
    root.destroy()
# close all sessions cleanly when the main window is closed
root.protocol("WM_DELETE_WINDOW", close_all)
root.option_add('*background', 'black')
root.option_add('*foreground', 'green')
root.title('jssh')
# per-platform locations for the log file and the pickled command history
if platform.system()=='Linux':
    jssh_home=os.environ['HOME']+"/jssh"
    try:
        os.makedirs(jssh_home)
    except:
        pass
    gl.logfile=jssh_home+'/log.txt'
    gl.history_file=jssh_home+'/history.data'
elif platform.system()=='Windows':
    try:
        os.makedirs(r'c:\jssh')
    except:
        pass
    gl.logfile=r'c:\jssh\log.txt'
    gl.history_file=r'c:\jssh\history.data'
else:
    print 'system type is not supported'
# seed the history file with a text-protocol pickle of two default commands
# (it is read back with load() when building the command combobox below)
if os.path.isfile(gl.history_file):
    pass
else:
    open(gl.history_file,'w').write('''(lp1
S'df -h'
p2
aS'ifconfig'
a.
''')
#root.iconbitmap(default='jssh.ico')
# Menu bar
def open_logfile():
    """Show the contents of the session log file in a new window."""
    #os.system('notepad %s' % gl.logfile)
    tl = Toplevel()
    tl.title("Log")
    log_text = Text(tl, bg='black', fg='green')
    log_scroll = Scrollbar(tl, command=log_text.yview)
    log_text.configure(yscrollcommand=log_scroll.set)
    log_scroll.pack(side=RIGHT, fill=Y)
    log_text.pack(fill=BOTH,expand=YES)
    # use open() in a `with` block instead of the legacy file() constructor
    # (removed in Python 3); this also closes the handle if insert() fails
    with open(gl.logfile) as log:
        for line in log:
            log_text.insert(END, line)
    log_text.see(END)
def help():
    """Show the usage notes in a topmost window.

    NOTE: intentionally shadows the builtin ``help``; the menubar below
    binds this name as its command.
    """
    help_msg = '''
    You should create server-list file frist:
    formate:hostname ip:port username password
    eg:hostname 192.168.1.10:22 root password
    use utf-8 formate better,one server one line
    Use Ctrl + left-click a server that can be manipulated separately.
    Use right-click on a server you can find it in the results.
    F11 for full screen!
    '''
    ht = Toplevel()
    ht.attributes("-topmost", 1)
    hl = Label(ht, text=help_msg, justify="left").pack()
menubar = Menu(root)
menubar.add_command(label="send file",command=send_ui)
menubar.add_command(label="get file", command=get_ui)
menubar.add_command(label="log", command=open_logfile)
menubar.add_command(label="help", command=help)
menubar.add_command(label="exit", command=close_all)
root.config(menu=menubar)
# Command entry area
command_frame = Frame(root, bd=1, relief=SUNKEN)
command_frame.pack(side=TOP, fill=X)
history_file = open(gl.history_file, 'r')
try:
gl.history_cmd = (load(history_file))
except:
os.rename(gl.history_file,'%s_%s' % (gl.history_file,strftime("%Y-%m-%d_%H_%M")))
open(gl.history_file,'w').write('''(lp1
S'df -h'
p2
aS'ifconfig'
a.
''')
history_file.close()
entry = AutocompleteCombobox(command_frame)
entry.set_completion_list(gl.history_cmd)
entry.pack(fill=X)
# OK (execute command) button
command_but = Button(command_frame, text='OK', state=DISABLED, command=gexe_cmd)
command_but.pack(side=RIGHT)
# Open server-list file button
file_but = Button(command_frame, text='select server list', command=open_list)
file_but.pack(side=LEFT)
# Command result text box and progress area
text_frame = Frame(root, bd=2, relief=SUNKEN)
text_frame.pack(side=RIGHT, fill=BOTH,expand=YES)
text = Text(text_frame, insertbackground='green', fg='green')
scroll = Scrollbar(text_frame, command=text.yview)
text.configure(yscrollcommand=scroll.set)
scroll.pack(side=RIGHT, fill=Y)
text.pack(fill=BOTH,expand=YES)
# Server list panel
server_frame = Frame(root, bd=2, relief=SUNKEN)
server_frame.pack(side=LEFT, fill=Y)
def select_all():
    """Tick every server checkbutton."""
    for checkbutton in gl.cbuts.values():
        checkbutton.select()
def deselect_all():
    """Untick every server checkbutton."""
    for checkbutton in gl.cbuts.values():
        checkbutton.deselect()
def select_con():
    """Tick only the servers that currently have a live connection."""
    for name, checkbutton in gl.cbuts.items():
        if gl.server_all[name].connect_status:
            checkbutton.select()
        else:
            checkbutton.deselect()
def deselect_reverse():
    """Invert the selection state of every server checkbutton."""
    for name, checkbutton in gl.cbuts.items():
        if gl.server_all[name].selected.get() == 1:
            checkbutton.deselect()
        else:
            checkbutton.select()
server_all_frame = Frame(server_frame, bd=2, relief=SUNKEN)
server_all_frame.pack(side=TOP)
Button(server_all_frame, text='all', command=select_all).grid(row=0, column=0,sticky='nesw')
Button(server_all_frame, text='none', command=deselect_all).grid(row=0, column=1,sticky='nesw')
Button(server_all_frame, text='just_connected', command=select_con).grid(row=1, column=0,sticky='nesw')
Button(server_all_frame, text='reverse', command=deselect_reverse).grid(row=1, column=1,sticky='nesw')
ft = Font(family='Fixdsys', size=11, weight=NORMAL, underline=1)
def listfunction(event):
    """<Configure> callback: grow the canvas scroll region to fit everything."""
    bounding_box = canvas.bbox("all")
    canvas.configure(scrollregion=bounding_box)
server_list_frame = Frame(server_frame, bd=2, relief=SUNKEN)
server_list_frame.pack(fill=Y,expand=YES)
canvas = Canvas(server_list_frame, width=150, height=500)
listframe = Frame(canvas)
myscrollbar = Scrollbar(server_list_frame, orient="vertical", command=canvas.yview)
canvas.configure(yscrollcommand=myscrollbar.set)
myscrollbar.pack(side="right", fill="y")
canvas.pack(side="left", fill=Y)
canvas.create_window((0, 0), window=listframe)
listframe.bind("<Configure>", listfunction)
# Connect / disconnect buttons
connect = Button(command_frame, text='connect', state=DISABLED, command=connect)
connect.pack(side=LEFT)
disconnect = Button(command_frame, text='disconnect', state=DISABLED, command=disconnect)
disconnect.pack(side=LEFT)
# Limit on the number of concurrent worker threads
thread_num_label = Label(command_frame,text=' Max Threads: ')
thread_num_label.pack(side=LEFT)
thread_num_e = StringVar()
thread_num_entry = Entry(command_frame,textvariable=thread_num_e,width=5,insertbackground = 'green')
thread_num_e.set('10')
thread_num_entry.pack(side=LEFT)
# Right-click (context menu) bindings
def save():
    """Prompt for a file name and dump the output text box contents to it."""
    save_file = asksaveasfilename(initialdir='.')
    if save_file:
        # use a context manager so the handle is flushed/closed even on
        # error (the original relied on refcounting to close it)
        with open(save_file, 'w') as fh:
            fh.write(text.get(0.0, END))
def clear():
    """Wipe the main output text box."""
    text.delete('0.0', END)
menubar = Menu(root)
menubar.add_command(label='save', command=save)
menubar.add_command(label='clear', command=clear)
def popup(event): # post the save/clear context menu at the mouse position
    menubar.post(event.x_root, event.y_root)
text.bind('<Button-3>', popup)
cmd_log = open(gl.logfile, 'a')
def save_log(log=''):
    """Append *log* to the session log file; callers flush cmd_log explicitly."""
    cmd_log.write(log)
def the_end():
# cmd_log.close()
print 'the end'
signal(SIGTERM, the_end)
signal(SIGINT, the_end)
root.mainloop()
# NOTE(review): `main` is not defined in this excerpt; presumably defined
# earlier in the file -- confirm before refactoring.
if __name__=='__main__':
    main()
| false | true |
f7fdfa57e32dfd0ee950023c731ee2d5825b1369 | 4,233 | py | Python | usaspending_api/common/management/commands/matview_runner.py | gaybro8777/usaspending-api | fe9d730acd632401bbbefa168e3d86d59560314b | [
"CC0-1.0"
] | null | null | null | usaspending_api/common/management/commands/matview_runner.py | gaybro8777/usaspending-api | fe9d730acd632401bbbefa168e3d86d59560314b | [
"CC0-1.0"
] | null | null | null | usaspending_api/common/management/commands/matview_runner.py | gaybro8777/usaspending-api | fe9d730acd632401bbbefa168e3d86d59560314b | [
"CC0-1.0"
] | null | null | null | import asyncio
import logging
import psycopg2
import subprocess
from django.core.management.base import BaseCommand
from pathlib import Path
from usaspending_api.common.data_connectors.async_sql_query import async_run_creates
from usaspending_api.common.helpers.timing_helpers import Timer
from usaspending_api.common.matview_manager import (
DEFAULT_MATIVEW_DIR,
DEPENDENCY_FILEPATH,
DROP_OLD_MATVIEWS,
MATERIALIZED_VIEWS,
MATVIEW_GENERATOR_FILE,
OVERLAY_VIEWS,
)
from usaspending_api.common.helpers.sql_helpers import get_database_dsn_string
logger = logging.getLogger("console")
class Command(BaseCommand):
    help = "Create, Run, Verify Materialized View SQL"

    def faux_init(self, args):
        """Copy parsed CLI options onto the instance.

        Called from handle() rather than __init__ because Django instantiates
        the command object before the options are parsed.
        """
        self.matviews = MATERIALIZED_VIEWS
        if args["only"]:
            self.matviews = {args["only"]: MATERIALIZED_VIEWS[args["only"]]}
        self.matview_dir = args["temp_dir"]
        self.no_cleanup = args["leave_sql"]
        self.run_dependencies = args["dependencies"]

    def add_arguments(self, parser):
        """Register CLI flags for this management command."""
        parser.add_argument("--only", choices=list(MATERIALIZED_VIEWS.keys()))
        parser.add_argument(
            "--leave-sql",
            action="store_true",
            help="Leave the generated SQL files instead of cleaning them after script completion.",
        )
        parser.add_argument(
            "--temp-dir",
            type=Path,
            help="Choose a non-default directory to store materialized view SQL files.",
            default=DEFAULT_MATIVEW_DIR,
        )
        parser.add_argument(
            "--dependencies", action="store_true", help="Run the SQL dependencies before the materialized view SQL."
        )

    def handle(self, *args, **options):
        """Overloaded Command Entrypoint"""
        with Timer(__name__):
            self.faux_init(options)

            self.generate_matview_sql()
            if self.run_dependencies:
                create_dependencies()
            self.create_views()
            if not self.no_cleanup:
                self.cleanup()

    def generate_matview_sql(self):
        """Convert JSON definition files to SQL"""
        if self.matview_dir.exists():
            # logger.warn() is deprecated; warning() is the supported API
            logger.warning("Clearing dir {}".format(self.matview_dir))
            recursive_delete(self.matview_dir)
        self.matview_dir.mkdir()

        # IF using this for operations, DO NOT LEAVE hardcoded `python3` in the command
        exec_str = "python3 {} --quiet --dest={}/ --batch_indexes=3".format(MATVIEW_GENERATOR_FILE, self.matview_dir)
        subprocess.call(exec_str, shell=True)

    def cleanup(self):
        """Cleanup files after run"""
        recursive_delete(self.matview_dir)

    @staticmethod
    def _read_text(path):
        """Read a file's full contents, closing the handle.

        The previous bare open(...).read() calls leaked open file objects.
        """
        with open(str(path), "r") as fh:
            return fh.read()

    def create_views(self):
        """Run all matview SQL concurrently, then the overlay views and the
        old-matview drop script sequentially."""
        loop = asyncio.new_event_loop()
        tasks = []
        for matview, config in self.matviews.items():
            logger.info("Creating Future for {}".format(matview))
            sql = self._read_text(self.matview_dir / config["sql_filename"])
            tasks.append(asyncio.ensure_future(async_run_creates(sql, wrapper=Timer(matview)), loop=loop))

        loop.run_until_complete(asyncio.gather(*tasks))
        loop.close()

        for view in OVERLAY_VIEWS:
            run_sql(self._read_text(view), "Creating Views")

        run_sql(self._read_text(DROP_OLD_MATVIEWS), "Drop Old Materialized Views")
def create_dependencies():
    """Run the SQL that the materialized views depend on."""
    # close the file handle deterministically instead of leaking it
    with open(str(DEPENDENCY_FILEPATH), "r") as fh:
        sql_statements = fh.read()
    run_sql(sql_statements, "dependencies")
def run_sql(sql, name):
    """Execute *sql* against the primary database, timing it under *name*.

    NOTE: psycopg2's connection `with` block commits (or rolls back) the
    transaction on exit but does NOT close the connection itself.
    """
    with psycopg2.connect(dsn=get_database_dsn_string()) as connection:
        with connection.cursor() as cursor:
            with Timer(name):
                cursor.execute(sql)
def recursive_delete(path):
    """Remove file or directory (clear entire dir structure)"""
    path = Path(str(path)).resolve()  # ensure it is an absolute path
    # Guard: no-op for a missing path, and refuse suspiciously short paths
    # so a top-level directory can never be wiped.
    # BUG FIX: the original used `and`, which (a) let an EXISTING short path
    # through to deletion and (b) made a MISSING long path fall through to
    # path.unlink(), raising FileNotFoundError.
    if not path.exists() or len(str(path)) < 6:
        return
    if path.is_dir():
        for child in path.glob("*"):
            recursive_delete(child)
        path.rmdir()
    else:
        path.unlink()
| 34.414634 | 117 | 0.647768 | import asyncio
import logging
import psycopg2
import subprocess
from django.core.management.base import BaseCommand
from pathlib import Path
from usaspending_api.common.data_connectors.async_sql_query import async_run_creates
from usaspending_api.common.helpers.timing_helpers import Timer
from usaspending_api.common.matview_manager import (
DEFAULT_MATIVEW_DIR,
DEPENDENCY_FILEPATH,
DROP_OLD_MATVIEWS,
MATERIALIZED_VIEWS,
MATVIEW_GENERATOR_FILE,
OVERLAY_VIEWS,
)
from usaspending_api.common.helpers.sql_helpers import get_database_dsn_string
logger = logging.getLogger("console")
class Command(BaseCommand):
help = "Create, Run, Verify Materialized View SQL"
def faux_init(self, args):
self.matviews = MATERIALIZED_VIEWS
if args["only"]:
self.matviews = {args["only"]: MATERIALIZED_VIEWS[args["only"]]}
self.matview_dir = args["temp_dir"]
self.no_cleanup = args["leave_sql"]
self.run_dependencies = args["dependencies"]
def add_arguments(self, parser):
parser.add_argument("--only", choices=list(MATERIALIZED_VIEWS.keys()))
parser.add_argument(
"--leave-sql",
action="store_true",
help="Leave the generated SQL files instead of cleaning them after script completion.",
)
parser.add_argument(
"--temp-dir",
type=Path,
help="Choose a non-default directory to store materialized view SQL files.",
default=DEFAULT_MATIVEW_DIR,
)
parser.add_argument(
"--dependencies", action="store_true", help="Run the SQL dependencies before the materialized view SQL."
)
def handle(self, *args, **options):
with Timer(__name__):
self.faux_init(options)
self.generate_matview_sql()
if self.run_dependencies:
create_dependencies()
self.create_views()
if not self.no_cleanup:
self.cleanup()
def generate_matview_sql(self):
if self.matview_dir.exists():
logger.warn("Clearing dir {}".format(self.matview_dir))
recursive_delete(self.matview_dir)
self.matview_dir.mkdir()
exec_str = "python3 {} --quiet --dest={}/ --batch_indexes=3".format(MATVIEW_GENERATOR_FILE, self.matview_dir)
subprocess.call(exec_str, shell=True)
def cleanup(self):
recursive_delete(self.matview_dir)
def create_views(self):
loop = asyncio.new_event_loop()
tasks = []
for matview, config in self.matviews.items():
logger.info("Creating Future for {}".format(matview))
sql = open(str(self.matview_dir / config["sql_filename"]), "r").read()
tasks.append(asyncio.ensure_future(async_run_creates(sql, wrapper=Timer(matview)), loop=loop))
loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
for view in OVERLAY_VIEWS:
sql = open(str(view), "r").read()
run_sql(sql, "Creating Views")
drop_sql = open(str(DROP_OLD_MATVIEWS), "r").read()
run_sql(drop_sql, "Drop Old Materialized Views")
def create_dependencies():
sql_statements = open(str(DEPENDENCY_FILEPATH), "r").read()
run_sql(sql_statements, "dependencies")
def run_sql(sql, name):
with psycopg2.connect(dsn=get_database_dsn_string()) as connection:
with connection.cursor() as cursor:
with Timer(name):
cursor.execute(sql)
def recursive_delete(path):
    """Remove a file or an entire directory tree.

    BUG FIX: the guard used `and`, which let an existing short path be
    deleted and made a missing long path fall through to path.unlink()
    (FileNotFoundError); `or` skips both cases.
    """
    path = Path(str(path)).resolve()
    if not path.exists() or len(str(path)) < 6:
        return
    if path.is_dir():
        for child in path.glob("*"):
            recursive_delete(child)
        path.rmdir()
    else:
        path.unlink()
| true | true |
f7fdfbe0771dca4c4180967e61b38ab3491eb32e | 692 | py | Python | migrations/versions/4f494d6d1000_added_purchase_date_to_the_stock_model.py | YellowFlash2012/stock-portfolio-io | 0a39c9a692aecc8d6b39e728efd63061112bc975 | [
"MIT"
] | null | null | null | migrations/versions/4f494d6d1000_added_purchase_date_to_the_stock_model.py | YellowFlash2012/stock-portfolio-io | 0a39c9a692aecc8d6b39e728efd63061112bc975 | [
"MIT"
] | null | null | null | migrations/versions/4f494d6d1000_added_purchase_date_to_the_stock_model.py | YellowFlash2012/stock-portfolio-io | 0a39c9a692aecc8d6b39e728efd63061112bc975 | [
"MIT"
] | null | null | null | """added purchase_date to the Stock model
Revision ID: 4f494d6d1000
Revises: df472f9e977f
Create Date: 2022-02-13 06:35:05.343296
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4f494d6d1000'
down_revision = 'df472f9e977f'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: add the nullable `purchase_date` DateTime column to `stocks`."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('stocks', sa.Column('purchase_date', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop the `purchase_date` column from `stocks`."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('stocks', 'purchase_date')
    # ### end Alembic commands ###
| 23.862069 | 85 | 0.702312 | from alembic import op
import sqlalchemy as sa
revision = '4f494d6d1000'
down_revision = 'df472f9e977f'
branch_labels = None
depends_on = None
def upgrade():
| true | true |
f7fdfbfb79e2eedab8854f28073d5e8bb50393cc | 30,421 | py | Python | pychron/dvc/dvc_persister.py | UIllinoisHALPychron/pychron | f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc | [
"Apache-2.0"
] | null | null | null | pychron/dvc/dvc_persister.py | UIllinoisHALPychron/pychron | f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc | [
"Apache-2.0"
] | 1 | 2021-12-16T18:48:03.000Z | 2021-12-16T18:48:03.000Z | pychron/dvc/dvc_persister.py | UIllinoisHALPychron/pychron | f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc | [
"Apache-2.0"
] | null | null | null | # ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import hashlib
import os
import shutil
from datetime import datetime
from apptools.preferences.preference_binding import bind_preference
from git.exc import GitCommandError
# ============= enthought library imports =======================
from sqlalchemy.exc import OperationalError, DatabaseError
from traits.api import Instance, Bool, Str
from uncertainties import std_dev, nominal_value
from yaml import YAMLError
from pychron.core.helpers.binpack import encode_blob, pack
from pychron.core.yaml import yload
from pychron.dvc import dvc_dump, analysis_path, repository_path, NPATH_MODIFIERS
from pychron.experiment.automated_run.persistence import BasePersister
from pychron.git_archive.repo_manager import GitRepoManager
from pychron.paths import paths
from pychron.pychron_constants import (
DVC_PROTOCOL,
NULL_STR,
ARGON_KEYS,
ARAR_MAPPING,
EXTRACTION_ATTRS,
META_ATTRS,
NULL_EXTRACT_DEVICES,
POSTCLEANUP,
PRECLEANUP,
CLEANUP,
EXTRACT_UNITS,
EXTRACT_VALUE,
DURATION,
WEIGHT,
CRYO_TEMP,
)
def format_repository_identifier(project):
    """Normalize a project name into a repository identifier.

    Forward slashes, backslashes and spaces are each replaced with an
    underscore, exactly as the chained str.replace version did.
    """
    return "".join("_" if ch in "/\\ " else ch for ch in project)
def spectrometer_sha(settings, src, defl, gains):
    """SHA1 fingerprint of the four spectrometer configuration dicts.

    Each dict's key/value pairs are fed to the hash in key-sorted order, so
    two configurations with identical content always hash identically.
    (Sorting keys is equivalent to sorting items because dict keys are
    unique, so the value never participates in the ordering.)
    """
    digest = hashlib.sha1()
    for mapping in (settings, src, defl, gains):
        for key in sorted(mapping):
            digest.update(key.encode("utf-8"))
            digest.update(str(mapping[key]).encode("utf-8"))
    return digest.hexdigest()
class DVCPersister(BasePersister):
    """Persists analysis data into DVC-managed git repositories."""
    # the git repository currently being written to
    active_repository = Instance(GitRepoManager)
    # the DVC service this persister talks to
    dvc = Instance(DVC_PROTOCOL)
    use_isotope_classifier = Bool(False)
    # name analysis files by UUID (synced with a preference in __init__)
    use_uuid_path_name = Bool(True)
    # isotope_classifier = Instance(IsotopeClassifier, ())
    stage_files = Bool(True)
    default_principal_investigator = Str
    _positions = None
    save_log_enabled = Bool(False)
    # Ar/Ar isotope name mapping; populated by _load_arar_mapping
    arar_mapping = None
    def __init__(self, bind=True, load_mapping=True, *args, **kw):
        """Create the persister.

        :param bind: bind ``use_uuid_path_name`` to the application preference
        :param load_mapping: load the optional arar_mapping.yaml file
        """
        super(DVCPersister, self).__init__(*args, **kw)
        if bind:
            bind_preference(
                self, "use_uuid_path_name", "pychron.experiment.use_uuid_path_name"
            )
        if load_mapping:
            self._load_arar_mapping()
    def per_spec_save(
        self, pr, repository_identifier=None, commit=False, commit_tag=None, push=True
    ):
        """Run the full save sequence for the persistence spec *pr*.

        Convenience entry point: optionally (re)initializes the repository,
        then runs the pre/post extraction and measurement save hooks in order.
        """
        self.per_spec = pr
        if repository_identifier:
            # open the repo without pulling remote changes
            self.initialize(repository_identifier, False)
        self.pre_extraction_save()
        self.pre_measurement_save()
        self.post_extraction_save()
        self.post_measurement_save(commit=commit, commit_tag=commit_tag, push=push)
    def push(self):
        """Push the active analysis repository and the meta repository."""
        # push changes
        self.dvc.push_repository(self.active_repository)
        # push commit
        self.dvc.meta_push()
    def initialize(self, repository, pull=True):
        """
        setup git repos.

        repositories are guaranteed to exist. The automated run factory clones the required projects
        on demand.

        :param repository: repository identifier (normalized before use)
        :param pull: when True and a remote exists, pull remote changes
        :return:
        """
        self.debug(
            "^^^^^^^^^^^^^ Initialize DVCPersister {} pull={}".format(repository, pull)
        )
        self.dvc.initialize()
        repository = format_repository_identifier(repository)
        self.active_repository = repo = GitRepoManager()
        root = repository_path(repository)
        repo.open_repo(root)
        remote = "origin"
        if repo.has_remote(remote) and pull:
            self.info("pulling changes from repo: {}".format(repository))
            try:
                repo.pull(
                    remote=remote,
                    use_progress=False,
                    use_auto_pull=self.dvc.use_auto_pull,
                )
            except GitCommandError:
                # a failed pull is non-fatal; continue with the local state
                self.warning("failed pulling changes")
                self.debug_exception()
    def pre_extraction_save(self):
        """Hook called before extraction; nothing to persist at this stage."""
        pass
    def post_extraction_save(self):
        """Write the ``<runid>.extraction.json`` file for the current run.

        Captures the extraction device response/output/setpoint streams
        (base64 encoded), snapshots/videos, grain polygons, extraction
        attributes and the measured positions. Also caches the position
        dicts on ``self._positions`` for the later database save
        (see ``_save_analysis_db``).
        """
        self.info("================= post extraction save started =================")
        per_spec = self.per_spec
        rblob = per_spec.response_blob  # time vs measured response
        oblob = per_spec.output_blob  # time vs %output
        sblob = per_spec.setpoint_blob  # time vs requested
        gp = per_spec.grain_polygons
        # binary streams are base64-encoded for storage in JSON
        if rblob is not None:
            rblob = encode_blob(rblob)
        if oblob is not None:
            oblob = encode_blob(oblob)
        if sblob is not None:
            sblob = encode_blob(sblob)
        if gp:
            gp = [encode_blob(g) for g in gp]
        obj = {
            "measured_response": rblob,
            "requested_output": oblob,
            "setpoint_stream": sblob,
            "snapshots": per_spec.snapshots,
            "videos": per_spec.videos,
            "grain_polygons": gp,
            "extraction_context": per_spec.extraction_context,
        }
        pid = per_spec.pid
        if pid:
            obj["pid"] = pid
        for e in EXTRACTION_ATTRS:
            v = getattr(per_spec.run_spec, e)
            obj[e] = v
        if not per_spec.positions:
            # always write at least one (empty) position record
            ps = [dict()]
        else:
            ps = []
            for i, pp in enumerate(per_spec.positions):
                pos, x, y, z = None, None, None, None
                # pp is either an (x, y[, z]) tuple or a hole/position identifier
                if isinstance(pp, tuple):
                    if len(pp) == 2:
                        x, y = pp
                    elif len(pp) == 3:
                        x, y, z = pp
                else:
                    pos = pp
                    try:
                        # look up the actual stage coordinates recorded for this hole
                        ep = per_spec.extraction_positions[i]
                        x = ep[0]
                        y = ep[1]
                        if len(ep) == 3:
                            z = ep[2]
                    except IndexError:
                        self.debug("no extraction position for {}".format(pp))
                    except TypeError:
                        self.debug("invalid extraction position")
                try:
                    pos = int(pos)
                except BaseException:
                    pos = None
                pd = {
                    "x": x,
                    "y": y,
                    "z": z,
                    "position": pos,
                    "is_degas": per_spec.run_spec.identifier == "dg",
                }
                ps.append(pd)
        obj["positions"] = ps
        self._positions = ps
        hexsha = self.dvc.get_meta_head()
        obj["commit"] = str(hexsha)
        path = self._make_path(modifier="extraction")
        dvc_dump(obj, path)
        self.info("================= post extraction save finished =================")
    def pre_measurement_save(self):
        """Hook called before measurement; nothing to persist at this stage."""
        pass
    def post_measurement_save(self, commit=True, commit_tag="COLLECTION", push=True):
        """
        save
        - analysis.json
        - analysis.monitor.json

        check if unique spectrometer.json
        commit changes
        push changes

        :param commit: stage and commit the written files to git
        :param commit_tag: tag used in the commit message, e.g. <COLLECTION>
        :param push: push the repository and meta commits to the remote
        :return: True on success, False if the user cancels after a git error
                 or the database save fails
        """
        self.info("================= post measurement save started =================")
        ret = True
        ar = self.active_repository
        # save spectrometer; the file name is the state sha so identical
        # spectrometer configurations share one file
        spec_sha = self._get_spectrometer_sha()
        spec_path = os.path.join(ar.path, "{}.json".format(spec_sha))
        if not os.path.isfile(spec_path):
            self._save_spectrometer_file(spec_path)
        # self.dvc.meta_repo.save_gains(self.per_spec.run_spec.mass_spectrometer,
        #                               self.per_spec.gains)
        # save analysis
        if not self.per_spec.timestamp:
            timestamp = datetime.now()
        else:
            timestamp = self.per_spec.timestamp
        # check repository identifier before saving
        # will modify repository to NoRepo if repository_identifier does not exist
        self._check_repository_identifier()
        self._save_analysis(timestamp)
        # save monitor
        self._save_monitor()
        # save peak center
        self._save_peak_center(self.per_spec.peak_center)
        # stage files
        dvc = self.dvc
        if self.stage_files:
            if commit:
                try:
                    ar.smart_pull(accept_their=True)
                    paths = [
                        spec_path,
                    ] + [self._make_path(modifier=m) for m in NPATH_MODIFIERS]
                    for p in paths:
                        if os.path.isfile(p):
                            ar.add(p, commit=False)
                        else:
                            self.debug("not at valid file {}".format(p))
                    # commit files
                    ar.commit("<{}>".format(commit_tag))
                    # commit default data reduction
                    add = False
                    p = self._make_path("intercepts")
                    if os.path.isfile(p):
                        ar.add(p, commit=False)
                        add = True
                    p = self._make_path("baselines")
                    if os.path.isfile(p):
                        ar.add(p, commit=False)
                        add = True
                    if add:
                        ar.commit("<ISOEVO> default collection fits")
                    for pp, tag, msg in (
                        (
                            "blanks",
                            "BLANKS",
                            "preceding {}".format(self.per_spec.previous_blank_runid),
                        ),
                        ("icfactors", "ICFactor", "default"),
                    ):
                        p = self._make_path(pp)
                        if os.path.isfile(p):
                            ar.add(p, commit=False)
                            ar.commit("<{}> {}".format(tag, msg))
                    if push:
                        # push changes
                        dvc.push_repository(ar)
                    # update meta
                    dvc.meta_pull(accept_our=True)
                    dvc.meta_commit(
                        "repo updated for analysis {}".format(
                            self.per_spec.run_spec.runid
                        )
                    )
                    if push:
                        # push commit
                        dvc.meta_push()
                except GitCommandError as e:
                    # git failure is non-fatal; let the user decide whether to cancel
                    self.warning(e)
                    if self.confirmation_dialog(
                        "NON FATAL\n\n"
                        "DVC/Git upload of analysis not successful."
                        "Do you want to CANCEL the experiment?\n",
                        timeout_ret=False,
                        timeout=30,
                    ):
                        ret = False
        # database save happens regardless of the git outcome
        with dvc.session_ctx():
            try:
                ret = self._save_analysis_db(timestamp) and ret
            except DatabaseError as e:
                self.warning_dialog(
                    "Fatal Error. Cannot save analysis to database. Cancelling "
                    "experiment. {}".format(e)
                )
                ret = False
        self.info("================= post measurement save finished =================")
        return ret
    def save_run_log_file(self, path):
        """Copy the run log at *path* into the repository and commit/push it."""
        if self.save_enabled and self.save_log_enabled:
            self.debug("saving run log file")
            npath = self._make_path("logs", ".log")
            shutil.copyfile(path, npath)
            ar = self.active_repository
            ar.smart_pull(accept_their=True)
            ar.add(npath, commit=False)
            ar.commit("<COLLECTION> log")
            self.dvc.push_repository(ar)
# private
def _load_arar_mapping(self):
"""
Isotope: IsotopeKey
example arar_mapping.yaml
{
Ar40: 'Ar40',
Ar39: 'Ar39',
Ar38: 'Ar38',
Ar37: 'Ar37',
Ar36: 'Ar36L1'
}
:return:
"""
p = os.path.join(paths.setup_dir, "arar_mapping.yaml")
if os.path.isfile(p):
self.debug("loading arar mapping from {}".format(p))
# with open(p, 'r') as rfile:
try:
obj = yload(p)
except YAMLError:
obj = {}
for k in ARGON_KEYS:
if k not in obj:
self.warning(
"Invalid arar_mapping.yaml file. required keys={}".format(
ARGON_KEYS
)
)
return
self.arar_mapping = obj
    def _check_repository_identifier(self):
        """Ensure the run's repository identifier exists in the database.

        If it does not, fall back to the "NoRepo" repository, creating it
        on first use.
        """
        repo_id = self.per_spec.run_spec.repository_identifier
        db = self.dvc.db
        repo = db.get_repository(repo_id)
        if repo is None:
            self.warning('No repository named ="{}" changing to NoRepo'.format(repo_id))
            self.per_spec.run_spec.repository_identifier = "NoRepo"
            repo = db.get_repository("NoRepo")
            if repo is None:
                db.add_repository("NoRepo", self.default_principal_investigator)
    def _save_analysis_db(self, timestamp):
        """Add the analysis and its associated records to the database.

        Persists the analysis row, per-isotope currents, media, measured
        positions (cached by ``post_extraction_save``), repository
        association, identifier linkage and the initial tag/change record.

        :param timestamp: analysis timestamp (datetime)
        :return: True on success, None if the analysis row could not be added
        """
        ps = self.per_spec
        rs = ps.run_spec
        d = {
            k: getattr(rs, k)
            for k in (
                "uuid",
                "analysis_type",
                "aliquot",
                "increment",
                "mass_spectrometer",
                WEIGHT,
                CLEANUP,
                PRECLEANUP,
                POSTCLEANUP,
                CRYO_TEMP,
                DURATION,
                EXTRACT_VALUE,
                EXTRACT_UNITS,
            )
        }
        # comment column is limited to 200 characters
        d["comment"] = rs.comment[:200] if rs.comment else ""
        ed = rs.extract_device
        if ed in NULL_EXTRACT_DEVICES:
            d["extract_device"] = "No Extract Device"
        else:
            d["extract_device"] = ed
        d["timestamp"] = timestamp
        # save script names
        d["measurementName"] = ps.measurement_name
        d["extractionName"] = ps.extraction_name
        d["experiment_type"] = self.per_spec.experiment_type
        db = self.dvc.db
        an = db.add_analysis(**d)
        if an is None:
            self.warning("Failed adding analysis to database. See full log for error")
            return
        # save currents
        self._save_currents(an)
        # for iso in ps.isotope_group.isotopes.values():
        #     self.add_current(iso)
        # db.add_analysis_result(an, iso)
        # save media
        if ps.snapshots:
            for p in ps.snapshots:
                db.add_media(p, an)
        if ps.videos:
            for p in ps.videos:
                db.add_media(p, an)
        if self._positions:
            if rs.load_name and rs.load_name != NULL_STR:
                load_name = rs.load_name
                load_holder = rs.load_holder
                db.add_load(load_name, load_holder, rs.username)
                db.flush()
                db.commit()
                for position in self._positions:
                    self.debug("adding measured position {}".format(position))
                    if not db.add_measured_position(an, load=load_name, **position):
                        self.warning(
                            "failed adding position {}, load={}".format(
                                position, load_name
                            )
                        )
        # all associations are handled by the ExperimentExecutor._retroactive_experiment_identifiers
        # *** _retroactive_experiment_identifiers is currently disabled ***
        if ps.use_repository_association:
            db.add_repository_association(rs.repository_identifier, an)
        self.debug('get identifier "{}"'.format(rs.identifier))
        pos = db.get_identifier(rs.identifier)
        self.debug("setting analysis irradiation position={}".format(pos))
        if pos is None:
            # identifier is not an irradiation position; treat it as a plain integer id
            an.simple_identifier = int(rs.identifier)
        else:
            an.irradiation_position = pos
        t = ps.tag
        db.flush()
        change = db.add_analysis_change(tag=t)
        an.change = change
        db.commit()
        return True
    def _save_currents(self, dban):
        """Store per-isotope "current" values for the analysis row *dban*.

        For every isotope: intercept, blank, baseline-corrected,
        IC-corrected and non-detector-corrected values plus the baseline
        value and the signal/baseline point counts, each keyed by a named
        parameter row. No-op unless ``dvc.update_currents_enabled``.
        """
        dvc = self.dvc
        if dvc.update_currents_enabled:
            ps = self.per_spec
            db = dvc.db
            for key, iso in ps.isotope_group.isotopes.items():
                param = db.add_parameter("{}_intercept".format(key))
                db.add_current(dban, iso.value, iso.error, param, iso.units)
                param = db.add_parameter("{}_blank".format(key), iso.blank.units)
                db.add_current(
                    dban, iso.blank.value, iso.blank.error, param, iso.blank.units
                )
                param = db.add_parameter("{}_bs_corrected".format(key))
                v = iso.get_baseline_corrected_value()
                db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
                param = db.add_parameter("{}_ic_corrected".format(key))
                v = iso.get_ic_corrected_value()
                db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
                param = db.add_parameter(key)
                v = iso.get_non_detector_corrected_value()
                db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
                param = db.add_parameter(iso.baseline.name)
                db.add_current(
                    dban,
                    iso.baseline.value,
                    iso.baseline.error,
                    param,
                    iso.baseline.units,
                )
                # point counts are stored as integer-typed currents
                param = db.add_parameter("{}_n".format(iso.baseline.name))
                db.add_current(dban, iso.baseline.n, None, param, "int")
                param = db.add_parameter("{}_n".format(iso.name))
                db.add_current(dban, iso.n, None, param, "int")
def _save_analysis(self, timestamp):
isos = {}
dets = {}
signals = []
baselines = []
sniffs = []
blanks = {}
intercepts = {}
cbaselines = {}
icfactors = {}
endianness = ">"
per_spec = self.per_spec
source = {"emission": per_spec.emission, "trap": per_spec.trap}
clf = None
if self.use_isotope_classifier:
clf = self.application.get_service(
"pychron.classifier.isotope_classifier.IsotopeClassifier"
)
for key, iso in per_spec.isotope_group.items():
sblob = encode_blob(iso.pack(endianness, as_hex=False))
snblob = encode_blob(iso.sniff.pack(endianness, as_hex=False))
for ss, blob in ((signals, sblob), (sniffs, snblob)):
d = {"isotope": iso.name, "detector": iso.detector, "blob": blob}
ss.append(d)
detector = next(
(d for d in per_spec.active_detectors if d.name == iso.detector), None
)
isod = {
"detector": iso.detector,
"name": iso.name,
"units": detector.units,
"serial_id": detector.serial_id if detector else "00000",
}
if clf is not None:
klass, prob = clf.predict_isotope(iso)
isod.update(classification=klass, classification_probability=prob)
isos[key] = isod
if iso.detector not in dets:
bblob = encode_blob(iso.baseline.pack(endianness, as_hex=False))
baselines.append({"detector": iso.detector, "blob": bblob})
dets[iso.detector] = {
"deflection": per_spec.defl_dict.get(iso.detector),
"gain": per_spec.gains.get(iso.detector),
}
icfactors[iso.detector] = {
"value": float(nominal_value(iso.ic_factor or 1)),
"error": float(std_dev(iso.ic_factor or 0)),
"fit": "default",
"references": [],
}
cbaselines[iso.detector] = {
"fit": iso.baseline.fit,
"error_type": iso.baseline.error_type,
"filter_outliers_dict": iso.baseline.filter_outliers_dict,
"value": float(iso.baseline.value),
"error": float(iso.baseline.error),
}
intercepts[key] = {
"fit": iso.fit,
"error_type": iso.error_type,
"filter_outliers_dict": iso.filter_outliers_dict,
"value": float(iso.value),
"error": float(iso.error),
}
blanks[key] = {
"fit": "previous",
"error_type": "",
"references": [
{"record_id": per_spec.previous_blank_runid, "exclude": False}
],
"value": float(iso.blank.value),
"error": float(iso.blank.error),
}
obj = self._make_analysis_dict()
from pychron.version import __version__ as pversion
from pychron.experiment import __version__ as eversion
from pychron.dvc import __version__ as dversion
obj["timestamp"] = timestamp.isoformat()
obj["collection_version"] = "{}:{}".format(eversion, dversion)
obj["acquisition_software"] = "pychron {}".format(pversion)
obj["data_reduction_software"] = "pychron {}".format(pversion)
obj["environmental"] = {
"lab_temperatures": per_spec.lab_temperatures,
"lab_humiditys": per_spec.lab_humiditys,
"lab_pneumatics": per_spec.lab_pneumatics,
}
obj["laboratory"] = per_spec.laboratory
obj["instrument_name"] = per_spec.instrument_name
obj["analyst_name"] = per_spec.run_spec.username
obj["whiff_result"] = per_spec.whiff_result
obj["detectors"] = dets
obj["isotopes"] = isos
obj["spec_sha"] = self._get_spectrometer_sha()
obj["intensity_scalar"] = per_spec.intensity_scalar
obj["source"] = source
# save the conditionals
obj["conditionals"] = (
[c.to_dict() for c in per_spec.conditionals]
if per_spec.conditionals
else None
)
obj["tripped_conditional"] = (
per_spec.tripped_conditional.result_dict()
if per_spec.tripped_conditional
else None
)
# save the scripts
ms = per_spec.run_spec.mass_spectrometer
for si in (
"measurement",
"extraction",
"post_measurement",
"post_equilibration",
"hops",
):
name = getattr(per_spec, "{}_name".format(si))
blob = getattr(per_spec, "{}_blob".format(si))
if name:
self.dvc.meta_repo.update_script(ms, name, blob)
obj[si] = name
# save keys for the arar isotopes
akeys = self.arar_mapping
if akeys is None:
akeys = ARAR_MAPPING
obj["arar_mapping"] = akeys
# save experiment
self.debug("---------------- Experiment Queue saving disabled")
# self.dvc.update_experiment_queue(ms, self.per_spec.experiment_queue_name,
# self.per_spec.experiment_queue_blob)
self._save_macrochron(obj)
hexsha = str(self.dvc.get_meta_head())
obj["commit"] = hexsha
# dump runid.json
p = self._make_path()
dvc_dump(obj, p)
p = self._make_path(modifier="intercepts")
dvc_dump(intercepts, p)
# dump runid.blank.json
p = self._make_path(modifier="blanks")
dvc_dump(blanks, p)
p = self._make_path(modifier="baselines")
dvc_dump(cbaselines, p)
p = self._make_path(modifier="icfactors")
dvc_dump(icfactors, p)
# dump runid.data.json
p = self._make_path(modifier=".data")
data = {
"commit": hexsha,
"encoding": "base64",
"format": "{}ff".format(endianness),
"signals": signals,
"baselines": baselines,
"sniffs": sniffs,
}
dvc_dump(data, p)
    def _save_macrochron(self, obj):
        """Hook for subclasses to add macrochron content to the analysis dict."""
        pass
    def _save_monitor(self):
        """Write runid.monitor.json describing the run monitor's checks, if any."""
        if self.per_spec.monitor:
            p = self._make_path(modifier="monitor")
            checks = []
            for ci in self.per_spec.monitor.checks:
                # data is a big-endian float-pair stream, base64-encoded
                data = encode_blob(pack(">ff", ci.data))
                params = dict(
                    name=ci.name,
                    parameter=ci.parameter,
                    criterion=ci.criterion,
                    comparator=ci.comparator,
                    tripped=ci.tripped,
                    data=data,
                )
                checks.append(params)
            dvc_dump(checks, p)
def _save_spectrometer_file(self, path):
obj = dict(
spectrometer=dict(self.per_spec.spec_dict),
gains=dict(self.per_spec.gains),
deflections=dict(self.per_spec.defl_dict),
settings=self.per_spec.settings,
)
# hexsha = self.dvc.get_meta_head()
# obj['commit'] = str(hexsha)
dvc_dump(obj, path)
    def _save_peak_center(self, pc):
        """Write runid.peakcenter.json for peak center result *pc* (if any).

        Stores the reference detector/isotope and, per detector, the DAC and
        signal bounds, resolution figures and the base64-encoded scan points.
        """
        self.info("DVC saving peakcenter")
        p = self._make_path(modifier="peakcenter")
        if pc:
            # big-endian float pairs for the scan point stream
            fmt = ">ff"
            obj = {
                "reference_detector": pc.reference_detector.name,
                "reference_isotope": pc.reference_isotope,
                "fmt": fmt,
                "interpolation": pc.interpolation_kind if pc.use_interpolation else "",
            }
            results = pc.get_results()
            if results:
                for result in results:
                    points = encode_blob(pack(fmt, result.points))
                    obj[result.detector] = {
                        "low_dac": result.low_dac,
                        "center_dac": result.center_dac,
                        "high_dac": result.high_dac,
                        "low_signal": result.low_signal,
                        "center_signal": result.center_signal,
                        "high_signal": result.high_signal,
                        "resolution": result.resolution,
                        "low_resolving_power": result.low_resolving_power,
                        "high_resolving_power": result.high_resolving_power,
                        "points": points,
                    }
            dvc_dump(obj, p)
def _make_path(self, modifier=None, extension=".json"):
runid = self.per_spec.run_spec.runid
uuid = self.per_spec.run_spec.uuid
repository_identifier = self.per_spec.run_spec.repository_identifier
if self.use_uuid_path_name:
name = uuid, uuid
else:
name = runid, runid
return analysis_path(
name, repository_identifier, modifier, extension, mode="w", force_sublen=2
)
    def _make_analysis_dict(self, keys=None):
        """Collect metadata attributes into a dict for the analysis JSON file.

        Each key is looked up on ``per_spec`` first, then on
        ``per_spec.run_spec``. A key missing from both is logged and maps
        to None.

        :param keys: attribute names to collect; defaults to META_ATTRS
        """
        if keys is None:
            keys = META_ATTRS
        def get(ki):
            obj = self.per_spec
            if not hasattr(obj, ki):
                obj = self.per_spec.run_spec
            try:
                return getattr(obj, ki)
            except AttributeError as e:
                # implicit None return for missing attributes
                self.warning("Attribute error: attr={}, error={}".format(ki, e))
        d = {k: get(k) for k in keys}
        return d
    def _get_spectrometer_sha(self):
        """
        return a sha-1 hash.

        generate using spec_dict, defl_dict, and gains

        spec_dict: source parameters, cdd operating voltage
        defl_dict: detector deflections
        gains: detector gains

        make hash using
        for key,value in dictionary:
            sha1.update(key)
            sha1.update(value)

        to ensure consistence, dictionaries are sorted by key
        for key,value in sorted(dictionary)

        :return: 40-character hex digest string
        """
        return spectrometer_sha(
            self.per_spec.settings,
            self.per_spec.spec_dict,
            self.per_spec.defl_dict,
            self.per_spec.gains,
        )
# ============= EOF =============================================
# self._save_measured_positions()
#
#
# def _save_measured_positions(self):
# dvc = self.dvc
#
# load_name = self.per_spec.load_name
# for i, pp in enumerate(self.per_spec.positions):
# if isinstance(pp, tuple):
# if len(pp) > 1:
# if len(pp) == 3:
# dvc.add_measured_position('', load_name, x=pp[0], y=pp[1], z=pp[2])
# else:
# dvc.add_measured_position('', load_name, x=pp[0], y=pp[1])
# else:
# dvc.add_measured_position(pp[0], load_name)
#
# else:
# dbpos = dvc.add_measured_position(pp, load_name)
# try:
# ep = self.per_spec.extraction_positions[i]
# dbpos.x = ep[0]
# dbpos.y = ep[1]
# if len(ep) == 3:
# dbpos.z = ep[2]
# except IndexError:
# self.debug('no extraction position for {}'.format(pp))
| 33.35636 | 100 | 0.514447 |
import hashlib
import os
import shutil
from datetime import datetime
from apptools.preferences.preference_binding import bind_preference
from git.exc import GitCommandError
from sqlalchemy.exc import OperationalError, DatabaseError
from traits.api import Instance, Bool, Str
from uncertainties import std_dev, nominal_value
from yaml import YAMLError
from pychron.core.helpers.binpack import encode_blob, pack
from pychron.core.yaml import yload
from pychron.dvc import dvc_dump, analysis_path, repository_path, NPATH_MODIFIERS
from pychron.experiment.automated_run.persistence import BasePersister
from pychron.git_archive.repo_manager import GitRepoManager
from pychron.paths import paths
from pychron.pychron_constants import (
DVC_PROTOCOL,
NULL_STR,
ARGON_KEYS,
ARAR_MAPPING,
EXTRACTION_ATTRS,
META_ATTRS,
NULL_EXTRACT_DEVICES,
POSTCLEANUP,
PRECLEANUP,
CLEANUP,
EXTRACT_UNITS,
EXTRACT_VALUE,
DURATION,
WEIGHT,
CRYO_TEMP,
)
def format_repository_identifier(project):
    """Return *project* with path separators and spaces replaced by underscores."""
    # one translate pass replaces the chained str.replace calls
    return project.translate(str.maketrans({"/": "_", "\\": "_", " ": "_"}))
def spectrometer_sha(settings, src, defl, gains):
    """Return a SHA-1 hex digest of the four spectrometer-state dicts.

    Keys are processed in sorted order so the digest does not depend on
    dict insertion order.
    """
    digest = hashlib.sha1()
    for mapping in (settings, src, defl, gains):
        for key in sorted(mapping):
            digest.update(key.encode("utf-8"))
            digest.update(str(mapping[key]).encode("utf-8"))
    return digest.hexdigest()
class DVCPersister(BasePersister):
active_repository = Instance(GitRepoManager)
dvc = Instance(DVC_PROTOCOL)
use_isotope_classifier = Bool(False)
use_uuid_path_name = Bool(True)
stage_files = Bool(True)
default_principal_investigator = Str
_positions = None
save_log_enabled = Bool(False)
arar_mapping = None
def __init__(self, bind=True, load_mapping=True, *args, **kw):
super(DVCPersister, self).__init__(*args, **kw)
if bind:
bind_preference(
self, "use_uuid_path_name", "pychron.experiment.use_uuid_path_name"
)
if load_mapping:
self._load_arar_mapping()
def per_spec_save(
self, pr, repository_identifier=None, commit=False, commit_tag=None, push=True
):
self.per_spec = pr
if repository_identifier:
self.initialize(repository_identifier, False)
self.pre_extraction_save()
self.pre_measurement_save()
self.post_extraction_save()
self.post_measurement_save(commit=commit, commit_tag=commit_tag, push=push)
def push(self):
self.dvc.push_repository(self.active_repository)
self.dvc.meta_push()
def initialize(self, repository, pull=True):
self.debug(
"^^^^^^^^^^^^^ Initialize DVCPersister {} pull={}".format(repository, pull)
)
self.dvc.initialize()
repository = format_repository_identifier(repository)
self.active_repository = repo = GitRepoManager()
root = repository_path(repository)
repo.open_repo(root)
remote = "origin"
if repo.has_remote(remote) and pull:
self.info("pulling changes from repo: {}".format(repository))
try:
repo.pull(
remote=remote,
use_progress=False,
use_auto_pull=self.dvc.use_auto_pull,
)
except GitCommandError:
self.warning("failed pulling changes")
self.debug_exception()
def pre_extraction_save(self):
pass
def post_extraction_save(self):
self.info("================= post extraction save started =================")
per_spec = self.per_spec
rblob = per_spec.response_blob
oblob = per_spec.output_blob
sblob = per_spec.setpoint_blob
gp = per_spec.grain_polygons
if rblob is not None:
rblob = encode_blob(rblob)
if oblob is not None:
oblob = encode_blob(oblob)
if sblob is not None:
sblob = encode_blob(sblob)
if gp:
gp = [encode_blob(g) for g in gp]
obj = {
"measured_response": rblob,
"requested_output": oblob,
"setpoint_stream": sblob,
"snapshots": per_spec.snapshots,
"videos": per_spec.videos,
"grain_polygons": gp,
"extraction_context": per_spec.extraction_context,
}
pid = per_spec.pid
if pid:
obj["pid"] = pid
for e in EXTRACTION_ATTRS:
v = getattr(per_spec.run_spec, e)
obj[e] = v
if not per_spec.positions:
ps = [dict()]
else:
ps = []
for i, pp in enumerate(per_spec.positions):
pos, x, y, z = None, None, None, None
if isinstance(pp, tuple):
if len(pp) == 2:
x, y = pp
elif len(pp) == 3:
x, y, z = pp
else:
pos = pp
try:
ep = per_spec.extraction_positions[i]
x = ep[0]
y = ep[1]
if len(ep) == 3:
z = ep[2]
except IndexError:
self.debug("no extraction position for {}".format(pp))
except TypeError:
self.debug("invalid extraction position")
try:
pos = int(pos)
except BaseException:
pos = None
pd = {
"x": x,
"y": y,
"z": z,
"position": pos,
"is_degas": per_spec.run_spec.identifier == "dg",
}
ps.append(pd)
obj["positions"] = ps
self._positions = ps
hexsha = self.dvc.get_meta_head()
obj["commit"] = str(hexsha)
path = self._make_path(modifier="extraction")
dvc_dump(obj, path)
self.info("================= post extraction save finished =================")
def pre_measurement_save(self):
pass
def post_measurement_save(self, commit=True, commit_tag="COLLECTION", push=True):
self.info("================= post measurement save started =================")
ret = True
ar = self.active_repository
spec_sha = self._get_spectrometer_sha()
spec_path = os.path.join(ar.path, "{}.json".format(spec_sha))
if not os.path.isfile(spec_path):
self._save_spectrometer_file(spec_path)
if not self.per_spec.timestamp:
timestamp = datetime.now()
else:
timestamp = self.per_spec.timestamp
self._check_repository_identifier()
self._save_analysis(timestamp)
self._save_monitor()
self._save_peak_center(self.per_spec.peak_center)
dvc = self.dvc
if self.stage_files:
if commit:
try:
ar.smart_pull(accept_their=True)
paths = [
spec_path,
] + [self._make_path(modifier=m) for m in NPATH_MODIFIERS]
for p in paths:
if os.path.isfile(p):
ar.add(p, commit=False)
else:
self.debug("not at valid file {}".format(p))
ar.commit("<{}>".format(commit_tag))
add = False
p = self._make_path("intercepts")
if os.path.isfile(p):
ar.add(p, commit=False)
add = True
p = self._make_path("baselines")
if os.path.isfile(p):
ar.add(p, commit=False)
add = True
if add:
ar.commit("<ISOEVO> default collection fits")
for pp, tag, msg in (
(
"blanks",
"BLANKS",
"preceding {}".format(self.per_spec.previous_blank_runid),
),
("icfactors", "ICFactor", "default"),
):
p = self._make_path(pp)
if os.path.isfile(p):
ar.add(p, commit=False)
ar.commit("<{}> {}".format(tag, msg))
if push:
dvc.push_repository(ar)
dvc.meta_pull(accept_our=True)
dvc.meta_commit(
"repo updated for analysis {}".format(
self.per_spec.run_spec.runid
)
)
if push:
dvc.meta_push()
except GitCommandError as e:
self.warning(e)
if self.confirmation_dialog(
"NON FATAL\n\n"
"DVC/Git upload of analysis not successful."
"Do you want to CANCEL the experiment?\n",
timeout_ret=False,
timeout=30,
):
ret = False
with dvc.session_ctx():
try:
ret = self._save_analysis_db(timestamp) and ret
except DatabaseError as e:
self.warning_dialog(
"Fatal Error. Cannot save analysis to database. Cancelling "
"experiment. {}".format(e)
)
ret = False
self.info("================= post measurement save finished =================")
return ret
def save_run_log_file(self, path):
if self.save_enabled and self.save_log_enabled:
self.debug("saving run log file")
npath = self._make_path("logs", ".log")
shutil.copyfile(path, npath)
ar = self.active_repository
ar.smart_pull(accept_their=True)
ar.add(npath, commit=False)
ar.commit("<COLLECTION> log")
self.dvc.push_repository(ar)
def _load_arar_mapping(self):
p = os.path.join(paths.setup_dir, "arar_mapping.yaml")
if os.path.isfile(p):
self.debug("loading arar mapping from {}".format(p))
try:
obj = yload(p)
except YAMLError:
obj = {}
for k in ARGON_KEYS:
if k not in obj:
self.warning(
"Invalid arar_mapping.yaml file. required keys={}".format(
ARGON_KEYS
)
)
return
self.arar_mapping = obj
def _check_repository_identifier(self):
repo_id = self.per_spec.run_spec.repository_identifier
db = self.dvc.db
repo = db.get_repository(repo_id)
if repo is None:
self.warning('No repository named ="{}" changing to NoRepo'.format(repo_id))
self.per_spec.run_spec.repository_identifier = "NoRepo"
repo = db.get_repository("NoRepo")
if repo is None:
db.add_repository("NoRepo", self.default_principal_investigator)
def _save_analysis_db(self, timestamp):
ps = self.per_spec
rs = ps.run_spec
d = {
k: getattr(rs, k)
for k in (
"uuid",
"analysis_type",
"aliquot",
"increment",
"mass_spectrometer",
WEIGHT,
CLEANUP,
PRECLEANUP,
POSTCLEANUP,
CRYO_TEMP,
DURATION,
EXTRACT_VALUE,
EXTRACT_UNITS,
)
}
d["comment"] = rs.comment[:200] if rs.comment else ""
ed = rs.extract_device
if ed in NULL_EXTRACT_DEVICES:
d["extract_device"] = "No Extract Device"
else:
d["extract_device"] = ed
d["timestamp"] = timestamp
d["measurementName"] = ps.measurement_name
d["extractionName"] = ps.extraction_name
d["experiment_type"] = self.per_spec.experiment_type
db = self.dvc.db
an = db.add_analysis(**d)
if an is None:
self.warning("Failed adding analysis to database. See full log for error")
return
self._save_currents(an)
if ps.snapshots:
for p in ps.snapshots:
db.add_media(p, an)
if ps.videos:
for p in ps.videos:
db.add_media(p, an)
if self._positions:
if rs.load_name and rs.load_name != NULL_STR:
load_name = rs.load_name
load_holder = rs.load_holder
db.add_load(load_name, load_holder, rs.username)
db.flush()
db.commit()
for position in self._positions:
self.debug("adding measured position {}".format(position))
if not db.add_measured_position(an, load=load_name, **position):
self.warning(
"failed adding position {}, load={}".format(
position, load_name
)
)
if ps.use_repository_association:
db.add_repository_association(rs.repository_identifier, an)
self.debug('get identifier "{}"'.format(rs.identifier))
pos = db.get_identifier(rs.identifier)
self.debug("setting analysis irradiation position={}".format(pos))
if pos is None:
an.simple_identifier = int(rs.identifier)
else:
an.irradiation_position = pos
t = ps.tag
db.flush()
change = db.add_analysis_change(tag=t)
an.change = change
db.commit()
return True
def _save_currents(self, dban):
dvc = self.dvc
if dvc.update_currents_enabled:
ps = self.per_spec
db = dvc.db
for key, iso in ps.isotope_group.isotopes.items():
param = db.add_parameter("{}_intercept".format(key))
db.add_current(dban, iso.value, iso.error, param, iso.units)
param = db.add_parameter("{}_blank".format(key), iso.blank.units)
db.add_current(
dban, iso.blank.value, iso.blank.error, param, iso.blank.units
)
param = db.add_parameter("{}_bs_corrected".format(key))
v = iso.get_baseline_corrected_value()
db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
param = db.add_parameter("{}_ic_corrected".format(key))
v = iso.get_ic_corrected_value()
db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
param = db.add_parameter(key)
v = iso.get_non_detector_corrected_value()
db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
param = db.add_parameter(iso.baseline.name)
db.add_current(
dban,
iso.baseline.value,
iso.baseline.error,
param,
iso.baseline.units,
)
param = db.add_parameter("{}_n".format(iso.baseline.name))
db.add_current(dban, iso.baseline.n, None, param, "int")
param = db.add_parameter("{}_n".format(iso.name))
db.add_current(dban, iso.n, None, param, "int")
def _save_analysis(self, timestamp):
isos = {}
dets = {}
signals = []
baselines = []
sniffs = []
blanks = {}
intercepts = {}
cbaselines = {}
icfactors = {}
endianness = ">"
per_spec = self.per_spec
source = {"emission": per_spec.emission, "trap": per_spec.trap}
clf = None
if self.use_isotope_classifier:
clf = self.application.get_service(
"pychron.classifier.isotope_classifier.IsotopeClassifier"
)
for key, iso in per_spec.isotope_group.items():
sblob = encode_blob(iso.pack(endianness, as_hex=False))
snblob = encode_blob(iso.sniff.pack(endianness, as_hex=False))
for ss, blob in ((signals, sblob), (sniffs, snblob)):
d = {"isotope": iso.name, "detector": iso.detector, "blob": blob}
ss.append(d)
detector = next(
(d for d in per_spec.active_detectors if d.name == iso.detector), None
)
isod = {
"detector": iso.detector,
"name": iso.name,
"units": detector.units,
"serial_id": detector.serial_id if detector else "00000",
}
if clf is not None:
klass, prob = clf.predict_isotope(iso)
isod.update(classification=klass, classification_probability=prob)
isos[key] = isod
if iso.detector not in dets:
bblob = encode_blob(iso.baseline.pack(endianness, as_hex=False))
baselines.append({"detector": iso.detector, "blob": bblob})
dets[iso.detector] = {
"deflection": per_spec.defl_dict.get(iso.detector),
"gain": per_spec.gains.get(iso.detector),
}
icfactors[iso.detector] = {
"value": float(nominal_value(iso.ic_factor or 1)),
"error": float(std_dev(iso.ic_factor or 0)),
"fit": "default",
"references": [],
}
cbaselines[iso.detector] = {
"fit": iso.baseline.fit,
"error_type": iso.baseline.error_type,
"filter_outliers_dict": iso.baseline.filter_outliers_dict,
"value": float(iso.baseline.value),
"error": float(iso.baseline.error),
}
intercepts[key] = {
"fit": iso.fit,
"error_type": iso.error_type,
"filter_outliers_dict": iso.filter_outliers_dict,
"value": float(iso.value),
"error": float(iso.error),
}
blanks[key] = {
"fit": "previous",
"error_type": "",
"references": [
{"record_id": per_spec.previous_blank_runid, "exclude": False}
],
"value": float(iso.blank.value),
"error": float(iso.blank.error),
}
obj = self._make_analysis_dict()
from pychron.version import __version__ as pversion
from pychron.experiment import __version__ as eversion
from pychron.dvc import __version__ as dversion
obj["timestamp"] = timestamp.isoformat()
obj["collection_version"] = "{}:{}".format(eversion, dversion)
obj["acquisition_software"] = "pychron {}".format(pversion)
obj["data_reduction_software"] = "pychron {}".format(pversion)
obj["environmental"] = {
"lab_temperatures": per_spec.lab_temperatures,
"lab_humiditys": per_spec.lab_humiditys,
"lab_pneumatics": per_spec.lab_pneumatics,
}
obj["laboratory"] = per_spec.laboratory
obj["instrument_name"] = per_spec.instrument_name
obj["analyst_name"] = per_spec.run_spec.username
obj["whiff_result"] = per_spec.whiff_result
obj["detectors"] = dets
obj["isotopes"] = isos
obj["spec_sha"] = self._get_spectrometer_sha()
obj["intensity_scalar"] = per_spec.intensity_scalar
obj["source"] = source
obj["conditionals"] = (
[c.to_dict() for c in per_spec.conditionals]
if per_spec.conditionals
else None
)
obj["tripped_conditional"] = (
per_spec.tripped_conditional.result_dict()
if per_spec.tripped_conditional
else None
)
ms = per_spec.run_spec.mass_spectrometer
for si in (
"measurement",
"extraction",
"post_measurement",
"post_equilibration",
"hops",
):
name = getattr(per_spec, "{}_name".format(si))
blob = getattr(per_spec, "{}_blob".format(si))
if name:
self.dvc.meta_repo.update_script(ms, name, blob)
obj[si] = name
akeys = self.arar_mapping
if akeys is None:
akeys = ARAR_MAPPING
obj["arar_mapping"] = akeys
self.debug("---------------- Experiment Queue saving disabled")
self._save_macrochron(obj)
hexsha = str(self.dvc.get_meta_head())
obj["commit"] = hexsha
p = self._make_path()
dvc_dump(obj, p)
p = self._make_path(modifier="intercepts")
dvc_dump(intercepts, p)
p = self._make_path(modifier="blanks")
dvc_dump(blanks, p)
p = self._make_path(modifier="baselines")
dvc_dump(cbaselines, p)
p = self._make_path(modifier="icfactors")
dvc_dump(icfactors, p)
p = self._make_path(modifier=".data")
data = {
"commit": hexsha,
"encoding": "base64",
"format": "{}ff".format(endianness),
"signals": signals,
"baselines": baselines,
"sniffs": sniffs,
}
dvc_dump(data, p)
def _save_macrochron(self, obj):
pass
def _save_monitor(self):
if self.per_spec.monitor:
p = self._make_path(modifier="monitor")
checks = []
for ci in self.per_spec.monitor.checks:
data = encode_blob(pack(">ff", ci.data))
params = dict(
name=ci.name,
parameter=ci.parameter,
criterion=ci.criterion,
comparator=ci.comparator,
tripped=ci.tripped,
data=data,
)
checks.append(params)
dvc_dump(checks, p)
def _save_spectrometer_file(self, path):
obj = dict(
spectrometer=dict(self.per_spec.spec_dict),
gains=dict(self.per_spec.gains),
deflections=dict(self.per_spec.defl_dict),
settings=self.per_spec.settings,
)
dvc_dump(obj, path)
def _save_peak_center(self, pc):
self.info("DVC saving peakcenter")
p = self._make_path(modifier="peakcenter")
if pc:
fmt = ">ff"
obj = {
"reference_detector": pc.reference_detector.name,
"reference_isotope": pc.reference_isotope,
"fmt": fmt,
"interpolation": pc.interpolation_kind if pc.use_interpolation else "",
}
results = pc.get_results()
if results:
for result in results:
points = encode_blob(pack(fmt, result.points))
obj[result.detector] = {
"low_dac": result.low_dac,
"center_dac": result.center_dac,
"high_dac": result.high_dac,
"low_signal": result.low_signal,
"center_signal": result.center_signal,
"high_signal": result.high_signal,
"resolution": result.resolution,
"low_resolving_power": result.low_resolving_power,
"high_resolving_power": result.high_resolving_power,
"points": points,
}
dvc_dump(obj, p)
def _make_path(self, modifier=None, extension=".json"):
runid = self.per_spec.run_spec.runid
uuid = self.per_spec.run_spec.uuid
repository_identifier = self.per_spec.run_spec.repository_identifier
if self.use_uuid_path_name:
name = uuid, uuid
else:
name = runid, runid
return analysis_path(
name, repository_identifier, modifier, extension, mode="w", force_sublen=2
)
def _make_analysis_dict(self, keys=None):
if keys is None:
keys = META_ATTRS
def get(ki):
obj = self.per_spec
if not hasattr(obj, ki):
obj = self.per_spec.run_spec
try:
return getattr(obj, ki)
except AttributeError as e:
self.warning("Attribute error: attr={}, error={}".format(ki, e))
d = {k: get(k) for k in keys}
return d
def _get_spectrometer_sha(self):
return spectrometer_sha(
self.per_spec.settings,
self.per_spec.spec_dict,
self.per_spec.defl_dict,
self.per_spec.gains,
)
| true | true |
f7fdfc9633154d29d6fe5822b38b895623f50c65 | 420 | py | Python | consume.py | gonejack/pyscripts | 1c6a10b489e128de36567c384596954733c09e83 | [
"MIT"
] | null | null | null | consume.py | gonejack/pyscripts | 1c6a10b489e128de36567c384596954733c09e83 | [
"MIT"
] | null | null | null | consume.py | gonejack/pyscripts | 1c6a10b489e128de36567c384596954733c09e83 | [
"MIT"
] | null | null | null | import re
with open("/tmp/fifo") as fifo:
cache = {}
for line in fifo:
match = re.search(r"(201903\d{2}) (\d{2}:\d{2}:\d{2})", line)
if match:
date = match.group(1)
hour = match.group(2)
target = "./fixData/%s_%s" % (date, hour)
if target not in cache:
cache[target] = open(target, 'a')
cache.get(target).write(line)
| 26.25 | 69 | 0.483333 | import re
with open("/tmp/fifo") as fifo:
cache = {}
for line in fifo:
match = re.search(r"(201903\d{2}) (\d{2}:\d{2}:\d{2})", line)
if match:
date = match.group(1)
hour = match.group(2)
target = "./fixData/%s_%s" % (date, hour)
if target not in cache:
cache[target] = open(target, 'a')
cache.get(target).write(line)
| true | true |
f7fdfcd39461453f3a3073ff80cd3c20d8db35fd | 10,396 | py | Python | map_machine/ui/cli.py | Strubbl/map-machine | e2c6f8cd373bc5dba322129112cfa58874a8321b | [
"MIT"
] | 62 | 2021-09-18T02:37:03.000Z | 2022-03-21T22:58:35.000Z | map_machine/ui/cli.py | Strubbl/map-machine | e2c6f8cd373bc5dba322129112cfa58874a8321b | [
"MIT"
] | 77 | 2016-07-31T08:11:34.000Z | 2021-09-06T22:40:59.000Z | map_machine/ui/cli.py | Strubbl/map-machine | e2c6f8cd373bc5dba322129112cfa58874a8321b | [
"MIT"
] | 6 | 2021-10-13T07:27:21.000Z | 2022-02-10T03:57:29.000Z | """
Command-line user interface.
"""
import argparse
import sys
from map_machine import __version__
from map_machine.map_configuration import BuildingMode, DrawingMode, LabelMode
from map_machine.osm.osm_reader import STAGES_OF_DECAY
__author__ = "Sergey Vartanov"
__email__ = "me@enzet.ru"
BOXES: str = " ▏▎▍▌▋▊▉"
BOXES_LENGTH: int = len(BOXES)
COMMAND_LINES: dict[str, list[str]] = {
"render": ["render", "-b", "10.000,20.000,10.001,20.001"],
"render_with_tooltips": [
"render",
"-b",
"10.000,20.000,10.001,20.001",
"--tooltips",
],
"icons": ["icons"],
"mapcss": ["mapcss"],
"element": ["element", "--node", "amenity=bench,material=wood"],
"tile": ["tile", "--coordinates", "50.000,40.000"],
}
COMMANDS: list[str] = [
"render",
"server",
"tile",
"element",
"mapcss",
"icons",
"taginfo",
]
def parse_arguments(args: list[str]) -> argparse.Namespace:
"""Parse Map Machine command-line arguments."""
parser: argparse.ArgumentParser = argparse.ArgumentParser(
description="Map Machine. OpenStreetMap renderer with custom icon set"
)
parser.add_argument(
"-v",
"--version",
action="version",
version="Map Machine " + __version__,
)
subparser = parser.add_subparsers(dest="command")
render_parser = subparser.add_parser(
"render",
description="Render SVG map. Use --boundary-box to specify geo "
"boundaries, --input to specify OSM XML or JSON input file, or "
"--coordinates and --size to specify central point and resulting image "
"size.",
help="draw SVG map",
)
add_render_arguments(render_parser)
add_map_arguments(render_parser)
tile_parser = subparser.add_parser(
"tile",
description="Generate SVG and PNG 256 × 256 px tiles for slippy maps. "
"You can use server command to run server in order to display "
"generated tiles as a map (e.g. with Leaflet).",
help="generate SVG and PNG tiles for slippy maps",
)
add_tile_arguments(tile_parser)
add_map_arguments(tile_parser)
add_server_arguments(
subparser.add_parser(
"server",
description="Run in order to display generated tiles as a map "
"(e.g. with Leaflet).",
help="run tile server",
)
)
add_element_arguments(
subparser.add_parser(
"element",
description="Draw map element separately.",
help="draw OSM element: node, way, relation",
)
)
add_mapcss_arguments(
subparser.add_parser(
"mapcss",
description="Write directory with MapCSS file and generated "
"Röntgen icons.",
help="write MapCSS file",
)
)
subparser.add_parser(
"icons",
description="Generate Röntgen icons as a grid and as separate SVG "
"icons",
help="draw Röntgen icons",
)
subparser.add_parser(
"taginfo",
description="Generate JSON file for Taginfo project.",
help="write Taginfo JSON file",
)
arguments: argparse.Namespace = parser.parse_args(args[1:])
return arguments
def add_map_arguments(parser: argparse.ArgumentParser) -> None:
"""Add map-specific arguments."""
parser.add_argument(
"--buildings",
metavar="<mode>",
default="flat",
choices=(mode.value for mode in BuildingMode),
help="building drawing mode: "
+ ", ".join(mode.value for mode in BuildingMode),
)
parser.add_argument(
"--mode",
default="normal",
metavar="<string>",
choices=(mode.value for mode in DrawingMode),
help="map drawing mode: "
+ ", ".join(mode.value for mode in DrawingMode),
)
parser.add_argument(
"--overlap",
dest="overlap",
default=12,
type=int,
help="how many pixels should be left around icons and text",
metavar="<integer>",
)
parser.add_argument(
"--labels",
dest="label_mode",
default="main",
metavar="<string>",
choices=(mode.value for mode in LabelMode),
help="label drawing mode: "
+ ", ".join(mode.value for mode in LabelMode),
)
parser.add_argument(
"--level",
default="overground",
help="display only this floor level",
)
parser.add_argument(
"--seed",
default="",
help="seed for random",
metavar="<string>",
)
parser.add_argument(
"--tooltips",
help="add tooltips with tags for icons in SVG files",
action=argparse.BooleanOptionalAction,
default=False,
)
parser.add_argument(
"--country",
help="two-letter code (ISO 3166-1 alpha-2) of country, that should be "
"used for location restrictions",
default="world",
)
parser.add_argument(
"--ignore-level-matching",
help="draw all map features ignoring the current level",
action=argparse.BooleanOptionalAction,
default=False,
)
parser.add_argument(
"--roofs",
help="draw building roofs",
action=argparse.BooleanOptionalAction,
default=True,
)
def add_tile_arguments(parser: argparse.ArgumentParser) -> None:
"""Add arguments for tile command."""
parser.add_argument(
"-c",
"--coordinates",
metavar="<latitude>,<longitude>",
help="coordinates of any location inside the tile",
)
parser.add_argument(
"-t",
"--tile",
metavar="<zoom level>/<x>/<y>",
help="tile specification",
)
parser.add_argument(
"--cache",
help="path for temporary OSM files",
default="cache",
metavar="<path>",
)
parser.add_argument(
"-b",
"--boundary-box",
help="construct the minimum amount of tiles that cover the requested "
"boundary box",
metavar="<lon1>,<lat1>,<lon2>,<lat2>",
)
parser.add_argument(
"-z",
"--zoom",
type=str,
metavar="<range>",
help="OSM zoom levels; can be list of numbers or ranges, e.g. `16-18`, "
"`16,17,18`, or `16,18-20`",
default="18",
)
parser.add_argument(
"-i",
"--input",
dest="input_file_name",
metavar="<path>",
help="input OSM XML file name (if not specified, the file will be "
"downloaded using OpenStreetMap API)",
)
def add_server_arguments(parser: argparse.ArgumentParser) -> None:
"""Add arguments for server command."""
parser.add_argument(
"--cache",
help="path for temporary OSM files",
default="cache",
metavar="<path>",
)
parser.add_argument(
"--port",
help="port number",
default=8080,
type=int,
metavar="<integer>",
)
def add_element_arguments(parser: argparse.ArgumentParser) -> None:
"""Add arguments for element command."""
parser.add_argument("-n", "--node")
parser.add_argument("-w", "--way")
parser.add_argument("-r", "--relation")
def add_render_arguments(parser: argparse.ArgumentParser) -> None:
"""Add arguments for render command."""
parser.add_argument(
"-i",
"--input",
dest="input_file_names",
metavar="<path>",
nargs="*",
help="input XML file name or names (if not specified, file will be "
"downloaded using OpenStreetMap API)",
)
parser.add_argument(
"-o",
"--output",
dest="output_file_name",
metavar="<path>",
default="out/map.svg",
help="output SVG file name",
)
parser.add_argument(
"-b",
"--boundary-box",
metavar="<lon1>,<lat1>,<lon2>,<lat2>",
help="geo boundary box; if the first value is negative, enclose the "
"value with quotes and use space before `-`",
)
parser.add_argument(
"--cache",
help="path for temporary OSM files",
default="cache",
metavar="<path>",
)
parser.add_argument(
"-z",
"--zoom",
type=float,
metavar="<float>",
help="OSM zoom level",
default=18.0,
)
parser.add_argument(
"-c",
"--coordinates",
metavar="<latitude>,<longitude>",
help="coordinates of any location inside the tile",
)
parser.add_argument(
"-s",
"--size",
metavar="<width>,<height>",
help="resulted image size",
)
def add_mapcss_arguments(parser: argparse.ArgumentParser) -> None:
"""Add arguments for mapcss command."""
parser.add_argument(
"--icons",
action=argparse.BooleanOptionalAction,
default=True,
help="add icons for nodes and areas",
)
parser.add_argument(
"--ways",
action=argparse.BooleanOptionalAction,
default=False,
help="add style for ways and relations",
)
parser.add_argument(
"--lifecycle",
action=argparse.BooleanOptionalAction,
default=True,
help="add icons for lifecycle tags; be careful: this will increase the "
f"number of node and area selectors by {len(STAGES_OF_DECAY) + 1} "
f"times",
)
def progress_bar(
number: int, total: int, length: int = 20, step: int = 1000, text: str = ""
) -> None:
"""
Draw progress bar using Unicode symbols.
:param number: current value
:param total: maximum value
:param length: progress bar length.
:param step: frequency of progress bar updating (assuming that numbers go
subsequently)
:param text: short description
"""
if number == -1:
sys.stdout.write(f"100 % {length * '█'}▏{text}\n")
elif number % step == 0:
ratio: float = number / total
parts: int = int(ratio * length * BOXES_LENGTH)
fill_length: int = int(parts / BOXES_LENGTH)
box: str = BOXES[int(parts - fill_length * BOXES_LENGTH)]
sys.stdout.write(
f"{str(int(int(ratio * 1000.0) / 10.0)):>3} % "
f"{fill_length * '█'}{box}"
f"{int(length - fill_length - 1) * ' '}▏{text}\n\033[F"
)
| 28.797784 | 80 | 0.577722 | import argparse
import sys
from map_machine import __version__
from map_machine.map_configuration import BuildingMode, DrawingMode, LabelMode
from map_machine.osm.osm_reader import STAGES_OF_DECAY
__author__ = "Sergey Vartanov"
__email__ = "me@enzet.ru"
BOXES: str = " ▏▎▍▌▋▊▉"
BOXES_LENGTH: int = len(BOXES)
COMMAND_LINES: dict[str, list[str]] = {
"render": ["render", "-b", "10.000,20.000,10.001,20.001"],
"render_with_tooltips": [
"render",
"-b",
"10.000,20.000,10.001,20.001",
"--tooltips",
],
"icons": ["icons"],
"mapcss": ["mapcss"],
"element": ["element", "--node", "amenity=bench,material=wood"],
"tile": ["tile", "--coordinates", "50.000,40.000"],
}
COMMANDS: list[str] = [
"render",
"server",
"tile",
"element",
"mapcss",
"icons",
"taginfo",
]
def parse_arguments(args: list[str]) -> argparse.Namespace:
parser: argparse.ArgumentParser = argparse.ArgumentParser(
description="Map Machine. OpenStreetMap renderer with custom icon set"
)
parser.add_argument(
"-v",
"--version",
action="version",
version="Map Machine " + __version__,
)
subparser = parser.add_subparsers(dest="command")
render_parser = subparser.add_parser(
"render",
description="Render SVG map. Use --boundary-box to specify geo "
"boundaries, --input to specify OSM XML or JSON input file, or "
"--coordinates and --size to specify central point and resulting image "
"size.",
help="draw SVG map",
)
add_render_arguments(render_parser)
add_map_arguments(render_parser)
tile_parser = subparser.add_parser(
"tile",
description="Generate SVG and PNG 256 × 256 px tiles for slippy maps. "
"You can use server command to run server in order to display "
"generated tiles as a map (e.g. with Leaflet).",
help="generate SVG and PNG tiles for slippy maps",
)
add_tile_arguments(tile_parser)
add_map_arguments(tile_parser)
add_server_arguments(
subparser.add_parser(
"server",
description="Run in order to display generated tiles as a map "
"(e.g. with Leaflet).",
help="run tile server",
)
)
add_element_arguments(
subparser.add_parser(
"element",
description="Draw map element separately.",
help="draw OSM element: node, way, relation",
)
)
add_mapcss_arguments(
subparser.add_parser(
"mapcss",
description="Write directory with MapCSS file and generated "
"Röntgen icons.",
help="write MapCSS file",
)
)
subparser.add_parser(
"icons",
description="Generate Röntgen icons as a grid and as separate SVG "
"icons",
help="draw Röntgen icons",
)
subparser.add_parser(
"taginfo",
description="Generate JSON file for Taginfo project.",
help="write Taginfo JSON file",
)
arguments: argparse.Namespace = parser.parse_args(args[1:])
return arguments
def add_map_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--buildings",
metavar="<mode>",
default="flat",
choices=(mode.value for mode in BuildingMode),
help="building drawing mode: "
+ ", ".join(mode.value for mode in BuildingMode),
)
parser.add_argument(
"--mode",
default="normal",
metavar="<string>",
choices=(mode.value for mode in DrawingMode),
help="map drawing mode: "
+ ", ".join(mode.value for mode in DrawingMode),
)
parser.add_argument(
"--overlap",
dest="overlap",
default=12,
type=int,
help="how many pixels should be left around icons and text",
metavar="<integer>",
)
parser.add_argument(
"--labels",
dest="label_mode",
default="main",
metavar="<string>",
choices=(mode.value for mode in LabelMode),
help="label drawing mode: "
+ ", ".join(mode.value for mode in LabelMode),
)
parser.add_argument(
"--level",
default="overground",
help="display only this floor level",
)
parser.add_argument(
"--seed",
default="",
help="seed for random",
metavar="<string>",
)
parser.add_argument(
"--tooltips",
help="add tooltips with tags for icons in SVG files",
action=argparse.BooleanOptionalAction,
default=False,
)
parser.add_argument(
"--country",
help="two-letter code (ISO 3166-1 alpha-2) of country, that should be "
"used for location restrictions",
default="world",
)
parser.add_argument(
"--ignore-level-matching",
help="draw all map features ignoring the current level",
action=argparse.BooleanOptionalAction,
default=False,
)
parser.add_argument(
"--roofs",
help="draw building roofs",
action=argparse.BooleanOptionalAction,
default=True,
)
def add_tile_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"-c",
"--coordinates",
metavar="<latitude>,<longitude>",
help="coordinates of any location inside the tile",
)
parser.add_argument(
"-t",
"--tile",
metavar="<zoom level>/<x>/<y>",
help="tile specification",
)
parser.add_argument(
"--cache",
help="path for temporary OSM files",
default="cache",
metavar="<path>",
)
parser.add_argument(
"-b",
"--boundary-box",
help="construct the minimum amount of tiles that cover the requested "
"boundary box",
metavar="<lon1>,<lat1>,<lon2>,<lat2>",
)
parser.add_argument(
"-z",
"--zoom",
type=str,
metavar="<range>",
help="OSM zoom levels; can be list of numbers or ranges, e.g. `16-18`, "
"`16,17,18`, or `16,18-20`",
default="18",
)
parser.add_argument(
"-i",
"--input",
dest="input_file_name",
metavar="<path>",
help="input OSM XML file name (if not specified, the file will be "
"downloaded using OpenStreetMap API)",
)
def add_server_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--cache",
help="path for temporary OSM files",
default="cache",
metavar="<path>",
)
parser.add_argument(
"--port",
help="port number",
default=8080,
type=int,
metavar="<integer>",
)
def add_element_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument("-n", "--node")
parser.add_argument("-w", "--way")
parser.add_argument("-r", "--relation")
def add_render_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"-i",
"--input",
dest="input_file_names",
metavar="<path>",
nargs="*",
help="input XML file name or names (if not specified, file will be "
"downloaded using OpenStreetMap API)",
)
parser.add_argument(
"-o",
"--output",
dest="output_file_name",
metavar="<path>",
default="out/map.svg",
help="output SVG file name",
)
parser.add_argument(
"-b",
"--boundary-box",
metavar="<lon1>,<lat1>,<lon2>,<lat2>",
help="geo boundary box; if the first value is negative, enclose the "
"value with quotes and use space before `-`",
)
parser.add_argument(
"--cache",
help="path for temporary OSM files",
default="cache",
metavar="<path>",
)
parser.add_argument(
"-z",
"--zoom",
type=float,
metavar="<float>",
help="OSM zoom level",
default=18.0,
)
parser.add_argument(
"-c",
"--coordinates",
metavar="<latitude>,<longitude>",
help="coordinates of any location inside the tile",
)
parser.add_argument(
"-s",
"--size",
metavar="<width>,<height>",
help="resulted image size",
)
def add_mapcss_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--icons",
action=argparse.BooleanOptionalAction,
default=True,
help="add icons for nodes and areas",
)
parser.add_argument(
"--ways",
action=argparse.BooleanOptionalAction,
default=False,
help="add style for ways and relations",
)
parser.add_argument(
"--lifecycle",
action=argparse.BooleanOptionalAction,
default=True,
help="add icons for lifecycle tags; be careful: this will increase the "
f"number of node and area selectors by {len(STAGES_OF_DECAY) + 1} "
f"times",
)
def progress_bar(
number: int, total: int, length: int = 20, step: int = 1000, text: str = ""
) -> None:
if number == -1:
sys.stdout.write(f"100 % {length * '█'}▏{text}\n")
elif number % step == 0:
ratio: float = number / total
parts: int = int(ratio * length * BOXES_LENGTH)
fill_length: int = int(parts / BOXES_LENGTH)
box: str = BOXES[int(parts - fill_length * BOXES_LENGTH)]
sys.stdout.write(
f"{str(int(int(ratio * 1000.0) / 10.0)):>3} % "
f"{fill_length * '█'}{box}"
f"{int(length - fill_length - 1) * ' '}▏{text}\n\033[F"
)
| true | true |
f7fe002baac1c1bbc3290fe084401ccf665443da | 395 | py | Python | realworld/asgi.py | rebornweb/realworld | 2ee84a6a06bd976b795119e700ee419fba1e2966 | [
"MIT"
] | 27 | 2022-01-29T16:18:19.000Z | 2022-03-29T12:43:47.000Z | realworld/asgi.py | rebornweb/realworld | 2ee84a6a06bd976b795119e700ee419fba1e2966 | [
"MIT"
] | 1 | 2022-02-08T23:50:10.000Z | 2022-02-09T11:05:00.000Z | realworld/asgi.py | rebornweb/realworld | 2ee84a6a06bd976b795119e700ee419fba1e2966 | [
"MIT"
] | 5 | 2022-02-09T11:05:19.000Z | 2022-03-24T07:35:43.000Z | """
ASGI config for realworld project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'realworld.settings')
application = get_asgi_application()
| 23.235294 | 78 | 0.787342 |
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'realworld.settings')
application = get_asgi_application()
| true | true |
f7fe00a5405a1c019805694c4046c1320cb5acb3 | 33,026 | py | Python | apps/dash-web-trader/env/Lib/site-packages/plotly/validators/scatterternary/marker/__init__.py | alzo425/dash-sample-apps | d3e9f521a3bc2b8d39ed2922838ad35b9b17beb0 | [
"MIT"
] | 2 | 2019-10-23T08:14:26.000Z | 2019-10-23T08:14:27.000Z | apps/dash-web-trader/env/Lib/site-packages/plotly/validators/scatterternary/marker/__init__.py | alzo425/dash-sample-apps | d3e9f521a3bc2b8d39ed2922838ad35b9b17beb0 | [
"MIT"
] | null | null | null | apps/dash-web-trader/env/Lib/site-packages/plotly/validators/scatterternary/marker/__init__.py | alzo425/dash-sample-apps | d3e9f521a3bc2b8d39ed2922838ad35b9b17beb0 | [
"MIT"
] | 1 | 2021-02-02T02:56:39.000Z | 2021-02-02T02:56:39.000Z |
import _plotly_utils.basevalidators
class SymbolsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self,
plotly_name='symbolsrc',
parent_name='scatterternary.marker',
**kwargs
):
super(SymbolsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'none'),
role=kwargs.pop('role', 'info'),
**kwargs
)
import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self,
plotly_name='symbol',
parent_name='scatterternary.marker',
**kwargs
):
super(SymbolValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop('array_ok', True),
edit_type=kwargs.pop('edit_type', 'style'),
role=kwargs.pop('role', 'style'),
values=kwargs.pop(
'values', [
0, 'circle', 100, 'circle-open', 200, 'circle-dot', 300,
'circle-open-dot', 1, 'square', 101, 'square-open', 201,
'square-dot', 301, 'square-open-dot', 2, 'diamond', 102,
'diamond-open', 202, 'diamond-dot', 302,
'diamond-open-dot', 3, 'cross', 103, 'cross-open', 203,
'cross-dot', 303, 'cross-open-dot', 4, 'x', 104, 'x-open',
204, 'x-dot', 304, 'x-open-dot', 5, 'triangle-up', 105,
'triangle-up-open', 205, 'triangle-up-dot', 305,
'triangle-up-open-dot', 6, 'triangle-down', 106,
'triangle-down-open', 206, 'triangle-down-dot', 306,
'triangle-down-open-dot', 7, 'triangle-left', 107,
'triangle-left-open', 207, 'triangle-left-dot', 307,
'triangle-left-open-dot', 8, 'triangle-right', 108,
'triangle-right-open', 208, 'triangle-right-dot', 308,
'triangle-right-open-dot', 9, 'triangle-ne', 109,
'triangle-ne-open', 209, 'triangle-ne-dot', 309,
'triangle-ne-open-dot', 10, 'triangle-se', 110,
'triangle-se-open', 210, 'triangle-se-dot', 310,
'triangle-se-open-dot', 11, 'triangle-sw', 111,
'triangle-sw-open', 211, 'triangle-sw-dot', 311,
'triangle-sw-open-dot', 12, 'triangle-nw', 112,
'triangle-nw-open', 212, 'triangle-nw-dot', 312,
'triangle-nw-open-dot', 13, 'pentagon', 113,
'pentagon-open', 213, 'pentagon-dot', 313,
'pentagon-open-dot', 14, 'hexagon', 114, 'hexagon-open',
214, 'hexagon-dot', 314, 'hexagon-open-dot', 15,
'hexagon2', 115, 'hexagon2-open', 215, 'hexagon2-dot', 315,
'hexagon2-open-dot', 16, 'octagon', 116, 'octagon-open',
216, 'octagon-dot', 316, 'octagon-open-dot', 17, 'star',
117, 'star-open', 217, 'star-dot', 317, 'star-open-dot',
18, 'hexagram', 118, 'hexagram-open', 218, 'hexagram-dot',
318, 'hexagram-open-dot', 19, 'star-triangle-up', 119,
'star-triangle-up-open', 219, 'star-triangle-up-dot', 319,
'star-triangle-up-open-dot', 20, 'star-triangle-down', 120,
'star-triangle-down-open', 220, 'star-triangle-down-dot',
320, 'star-triangle-down-open-dot', 21, 'star-square', 121,
'star-square-open', 221, 'star-square-dot', 321,
'star-square-open-dot', 22, 'star-diamond', 122,
'star-diamond-open', 222, 'star-diamond-dot', 322,
'star-diamond-open-dot', 23, 'diamond-tall', 123,
'diamond-tall-open', 223, 'diamond-tall-dot', 323,
'diamond-tall-open-dot', 24, 'diamond-wide', 124,
'diamond-wide-open', 224, 'diamond-wide-dot', 324,
'diamond-wide-open-dot', 25, 'hourglass', 125,
'hourglass-open', 26, 'bowtie', 126, 'bowtie-open', 27,
'circle-cross', 127, 'circle-cross-open', 28, 'circle-x',
128, 'circle-x-open', 29, 'square-cross', 129,
'square-cross-open', 30, 'square-x', 130, 'square-x-open',
31, 'diamond-cross', 131, 'diamond-cross-open', 32,
'diamond-x', 132, 'diamond-x-open', 33, 'cross-thin', 133,
'cross-thin-open', 34, 'x-thin', 134, 'x-thin-open', 35,
'asterisk', 135, 'asterisk-open', 36, 'hash', 136,
'hash-open', 236, 'hash-dot', 336, 'hash-open-dot', 37,
'y-up', 137, 'y-up-open', 38, 'y-down', 138, 'y-down-open',
39, 'y-left', 139, 'y-left-open', 40, 'y-right', 140,
'y-right-open', 41, 'line-ew', 141, 'line-ew-open', 42,
'line-ns', 142, 'line-ns-open', 43, 'line-ne', 143,
'line-ne-open', 44, 'line-nw', 144, 'line-nw-open'
]
),
**kwargs
)
import _plotly_utils.basevalidators
class SizesrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self,
plotly_name='sizesrc',
parent_name='scatterternary.marker',
**kwargs
):
super(SizesrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'none'),
role=kwargs.pop('role', 'info'),
**kwargs
)
import _plotly_utils.basevalidators
class SizerefValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='sizeref',
parent_name='scatterternary.marker',
**kwargs
):
super(SizerefValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
role=kwargs.pop('role', 'style'),
**kwargs
)
import _plotly_utils.basevalidators
class SizemodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self,
plotly_name='sizemode',
parent_name='scatterternary.marker',
**kwargs
):
super(SizemodeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
role=kwargs.pop('role', 'info'),
values=kwargs.pop('values', ['diameter', 'area']),
**kwargs
)
import _plotly_utils.basevalidators
class SizeminValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='sizemin',
parent_name='scatterternary.marker',
**kwargs
):
super(SizeminValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
min=kwargs.pop('min', 0),
role=kwargs.pop('role', 'style'),
**kwargs
)
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='size',
parent_name='scatterternary.marker',
**kwargs
):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
anim=kwargs.pop('anim', True),
array_ok=kwargs.pop('array_ok', True),
edit_type=kwargs.pop('edit_type', 'calc'),
min=kwargs.pop('min', 0),
role=kwargs.pop('role', 'style'),
**kwargs
)
import _plotly_utils.basevalidators
class ShowscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self,
plotly_name='showscale',
parent_name='scatterternary.marker',
**kwargs
):
super(ShowscaleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
role=kwargs.pop('role', 'info'),
**kwargs
)
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self,
plotly_name='reversescale',
parent_name='scatterternary.marker',
**kwargs
):
super(ReversescaleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'plot'),
role=kwargs.pop('role', 'style'),
**kwargs
)
import _plotly_utils.basevalidators
class OpacitysrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self,
plotly_name='opacitysrc',
parent_name='scatterternary.marker',
**kwargs
):
super(OpacitysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'none'),
role=kwargs.pop('role', 'info'),
**kwargs
)
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='opacity',
parent_name='scatterternary.marker',
**kwargs
):
super(OpacityValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
anim=kwargs.pop('anim', True),
array_ok=kwargs.pop('array_ok', True),
edit_type=kwargs.pop('edit_type', 'style'),
max=kwargs.pop('max', 1),
min=kwargs.pop('min', 0),
role=kwargs.pop('role', 'style'),
**kwargs
)
import _plotly_utils.basevalidators
class MaxdisplayedValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='maxdisplayed',
parent_name='scatterternary.marker',
**kwargs
):
super(MaxdisplayedValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'plot'),
min=kwargs.pop('min', 0),
role=kwargs.pop('role', 'style'),
**kwargs
)
import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for 'scatterternary.marker.line'.

    Builds the nested Line object; the generated `data_docs` string
    below describes each child property for user-facing help text.
    """

    def __init__(
        self,
        plotly_name='line',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        super(LineValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Name of the graph_objs data class this validator constructs.
            data_class_str=kwargs.pop('data_class_str', 'Line'),
            data_docs=kwargs.pop(
                'data_docs', """
            autocolorscale
                Determines whether the colorscale is a default
                palette (`autocolorscale: true`) or the palette
                determined by `marker.line.colorscale`. Has an
                effect only if in `marker.line.color`is set to
                a numerical array. In case `colorscale` is
                unspecified or `autocolorscale` is true, the
                default palette will be chosen according to
                whether numbers in the `color` array are all
                positive, all negative or mixed.
            cauto
                Determines whether or not the color domain is
                computed with respect to the input data (here
                in `marker.line.color`) or the bounds set in
                `marker.line.cmin` and `marker.line.cmax` Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Defaults to `false` when
                `marker.line.cmin` and `marker.line.cmax` are
                set by the user.
            cmax
                Sets the upper bound of the color domain. Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Value should have the
                same units as in `marker.line.color` and if
                set, `marker.line.cmin` must be set as well.
            cmid
                Sets the mid-point of the color domain by
                scaling `marker.line.cmin` and/or
                `marker.line.cmax` to be equidistant to this
                point. Has an effect only if in
                `marker.line.color`is set to a numerical array.
                Value should have the same units as in
                `marker.line.color`. Has no effect when
                `marker.line.cauto` is `false`.
            cmin
                Sets the lower bound of the color domain. Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Value should have the
                same units as in `marker.line.color` and if
                set, `marker.line.cmax` must be set as well.
            color
                Sets themarker.linecolor. It accepts either a
                specific color or an array of numbers that are
                mapped to the colorscale relative to the max
                and min values of the array or relative to
                `marker.line.cmin` and `marker.line.cmax` if
                set.
            coloraxis
                Sets a reference to a shared color axis.
                References to these shared color axes are
                "coloraxis", "coloraxis2", "coloraxis3", etc.
                Settings for these shared color axes are set in
                the layout, under `layout.coloraxis`,
                `layout.coloraxis2`, etc. Note that multiple
                color scales can be linked to the same color
                axis.
            colorscale
                Sets the colorscale. Has an effect only if in
                `marker.line.color`is set to a numerical array.
                The colorscale must be an array containing
                arrays mapping a normalized value to an rgb,
                rgba, hex, hsl, hsv, or named color string. At
                minimum, a mapping for the lowest (0) and
                highest (1) values are required. For example,
                `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To
                control the bounds of the colorscale in color
                space, use`marker.line.cmin` and
                `marker.line.cmax`. Alternatively, `colorscale`
                may be a palette name string of the following
                list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R
                eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black
                body,Earth,Electric,Viridis,Cividis.
            colorsrc
                Sets the source reference on plot.ly for color
                .
            reversescale
                Reverses the color mapping if true. Has an
                effect only if in `marker.line.color`is set to
                a numerical array. If true, `marker.line.cmin`
                will correspond to the last color in the array
                and `marker.line.cmax` will correspond to the
                first color.
            width
                Sets the width (in px) of the lines bounding
                the marker points.
            widthsrc
                Sets the source reference on plot.ly for width
                .
            """
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class GradientValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for 'scatterternary.marker.gradient'.

    Builds the nested Gradient object; the generated `data_docs`
    string below describes each child property for user-facing help.
    """

    def __init__(
        self,
        plotly_name='gradient',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        super(GradientValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Name of the graph_objs data class this validator constructs.
            data_class_str=kwargs.pop('data_class_str', 'Gradient'),
            data_docs=kwargs.pop(
                'data_docs', """
            color
                Sets the final color of the gradient fill: the
                center color for radial, the right for
                horizontal, or the bottom for vertical.
            colorsrc
                Sets the source reference on plot.ly for color
                .
            type
                Sets the type of gradient used to fill the
                markers
            typesrc
                Sets the source reference on plot.ly for type
                .
            """
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Source-reference validator for 'scatterternary.marker.colorsrc'."""

    def __init__(
        self,
        plotly_name='colorsrc',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(ColorsrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
    """Colorscale validator for 'scatterternary.marker.colorscale'."""

    def __init__(
        self,
        plotly_name='colorscale',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        # Setting an explicit colorscale implies autocolorscale is off.
        kwargs.setdefault('implied_edits', {'autocolorscale': False})
        kwargs.setdefault('role', 'style')
        super(ColorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorBarValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for 'scatterternary.marker.colorbar'.

    Builds the nested ColorBar object; the generated `data_docs`
    string below describes each child property for user-facing help.
    """

    def __init__(
        self,
        plotly_name='colorbar',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        super(ColorBarValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Name of the graph_objs data class this validator constructs.
            data_class_str=kwargs.pop('data_class_str', 'ColorBar'),
            data_docs=kwargs.pop(
                'data_docs', """
            bgcolor
                Sets the color of padded area.
            bordercolor
                Sets the axis line color.
            borderwidth
                Sets the width (in px) or the border enclosing
                this color bar.
            dtick
                Sets the step in-between ticks on this axis.
                Use with `tick0`. Must be a positive number, or
                special strings available to "log" and "date"
                axes. If the axis `type` is "log", then ticks
                are set every 10^(n*dtick) where n is the tick
                number. For example, to set a tick mark at 1,
                10, 100, 1000, ... set dtick to 1. To set tick
                marks at 1, 100, 10000, ... set dtick to 2. To
                set tick marks at 1, 5, 25, 125, 625, 3125, ...
                set dtick to log_10(5), or 0.69897000433. "log"
                has several special values; "L<f>", where `f`
                is a positive number, gives ticks linearly
                spaced in value (but not position). For example
                `tick0` = 0.1, `dtick` = "L0.5" will put ticks
                at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
                plus small digits between, use "D1" (all
                digits) or "D2" (only 2 and 5). `tick0` is
                ignored for "D1" and "D2". If the axis `type`
                is "date", then you must convert the time to
                milliseconds. For example, to set the interval
                between ticks to one day, set `dtick` to
                86400000.0. "date" also has special values
                "M<n>" gives ticks spaced by a number of
                months. `n` must be a positive integer. To set
                ticks on the 15th of every third month, set
                `tick0` to "2000-01-15" and `dtick` to "M3". To
                set ticks every 4 years, set `dtick` to "M48"
            exponentformat
                Determines a formatting rule for the tick
                exponents. For example, consider the number
                1,000,000,000. If "none", it appears as
                1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
                "power", 1x10^9 (with 9 in a super script). If
                "SI", 1G. If "B", 1B.
            len
                Sets the length of the color bar This measure
                excludes the padding of both ends. That is, the
                color bar length is this length minus the
                padding on both ends.
            lenmode
                Determines whether this color bar's length
                (i.e. the measure in the color variation
                direction) is set in units of plot "fraction"
                or in *pixels. Use `len` to set the value.
            nticks
                Specifies the maximum number of ticks for the
                particular axis. The actual number of ticks
                will be chosen automatically to be less than or
                equal to `nticks`. Has an effect only if
                `tickmode` is set to "auto".
            outlinecolor
                Sets the axis line color.
            outlinewidth
                Sets the width (in px) of the axis line.
            separatethousands
                If "true", even 4-digit integers are separated
            showexponent
                If "all", all exponents are shown besides their
                significands. If "first", only the exponent of
                the first tick is shown. If "last", only the
                exponent of the last tick is shown. If "none",
                no exponents appear.
            showticklabels
                Determines whether or not the tick labels are
                drawn.
            showtickprefix
                If "all", all tick labels are displayed with a
                prefix. If "first", only the first tick is
                displayed with a prefix. If "last", only the
                last tick is displayed with a suffix. If
                "none", tick prefixes are hidden.
            showticksuffix
                Same as `showtickprefix` but for tick suffixes.
            thickness
                Sets the thickness of the color bar This
                measure excludes the size of the padding, ticks
                and labels.
            thicknessmode
                Determines whether this color bar's thickness
                (i.e. the measure in the constant color
                direction) is set in units of plot "fraction"
                or in "pixels". Use `thickness` to set the
                value.
            tick0
                Sets the placement of the first tick on this
                axis. Use with `dtick`. If the axis `type` is
                "log", then you must take the log of your
                starting tick (e.g. to set the starting tick to
                100, set the `tick0` to 2) except when
                `dtick`=*L<f>* (see `dtick` for more info). If
                the axis `type` is "date", it should be a date
                string, like date data. If the axis `type` is
                "category", it should be a number, using the
                scale where each category is assigned a serial
                number from zero in the order it appears.
            tickangle
                Sets the angle of the tick labels with respect
                to the horizontal. For example, a `tickangle`
                of -90 draws the tick labels vertically.
            tickcolor
                Sets the tick color.
            tickfont
                Sets the color bar's tick label font
            tickformat
                Sets the tick label formatting rule using d3
                formatting mini-languages which are very
                similar to those in Python. For numbers, see: h
                ttps://github.com/d3/d3-format/blob/master/READ
                ME.md#locale_format And for dates see:
                https://github.com/d3/d3-time-
                format/blob/master/README.md#locale_format We
                add one item to d3's date formatter: "%{n}f"
                for fractional seconds with n digits. For
                example, *2016-10-13 09:15:23.456* with
                tickformat "%H~%M~%S.%2f" would display
                "09~15~23.46"
            tickformatstops
                plotly.graph_objs.scatterternary.marker.colorba
                r.Tickformatstop instance or dict with
                compatible properties
            tickformatstopdefaults
                When used in a template (as layout.template.dat
                a.scatterternary.marker.colorbar.tickformatstop
                defaults), sets the default property values to
                use for elements of
                scatterternary.marker.colorbar.tickformatstops
            ticklen
                Sets the tick length (in px).
            tickmode
                Sets the tick mode for this axis. If "auto",
                the number of ticks is set via `nticks`. If
                "linear", the placement of the ticks is
                determined by a starting position `tick0` and a
                tick step `dtick` ("linear" is the default
                value if `tick0` and `dtick` are provided). If
                "array", the placement of the ticks is set via
                `tickvals` and the tick text is `ticktext`.
                ("array" is the default value if `tickvals` is
                provided).
            tickprefix
                Sets a tick label prefix.
            ticks
                Determines whether ticks are drawn or not. If
                "", this axis' ticks are not drawn. If
                "outside" ("inside"), this axis' are drawn
                outside (inside) the axis lines.
            ticksuffix
                Sets a tick label suffix.
            ticktext
                Sets the text displayed at the ticks position
                via `tickvals`. Only has an effect if
                `tickmode` is set to "array". Used with
                `tickvals`.
            ticktextsrc
                Sets the source reference on plot.ly for
                ticktext .
            tickvals
                Sets the values at which ticks on this axis
                appear. Only has an effect if `tickmode` is set
                to "array". Used with `ticktext`.
            tickvalssrc
                Sets the source reference on plot.ly for
                tickvals .
            tickwidth
                Sets the tick width (in px).
            title
                plotly.graph_objs.scatterternary.marker.colorba
                r.Title instance or dict with compatible
                properties
            titlefont
                Deprecated: Please use
                scatterternary.marker.colorbar.title.font
                instead. Sets this color bar's title font. Note
                that the title's font used to be set by the now
                deprecated `titlefont` attribute.
            titleside
                Deprecated: Please use
                scatterternary.marker.colorbar.title.side
                instead. Determines the location of color bar's
                title with respect to the color bar. Note that
                the title's location used to be set by the now
                deprecated `titleside` attribute.
            x
                Sets the x position of the color bar (in plot
                fraction).
            xanchor
                Sets this color bar's horizontal position
                anchor. This anchor binds the `x` position to
                the "left", "center" or "right" of the color
                bar.
            xpad
                Sets the amount of padding (in px) along the x
                direction.
            y
                Sets the y position of the color bar (in plot
                fraction).
            yanchor
                Sets this color bar's vertical position anchor
                This anchor binds the `y` position to the
                "top", "middle" or "bottom" of the color bar.
            ypad
                Sets the amount of padding (in px) along the y
                direction.
            """
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColoraxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
    """Subplot-id validator for 'scatterternary.marker.coloraxis'."""

    def __init__(
        self,
        plotly_name='coloraxis',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('dflt', None)
        kwargs.setdefault('edit_type', 'calc')
        # Matches 'coloraxis', 'coloraxis2', 'coloraxis3', ... (no 'coloraxis1').
        kwargs.setdefault('regex', '/^coloraxis([2-9]|[1-9][0-9]+)?$/')
        kwargs.setdefault('role', 'info')
        super(ColoraxisValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Color validator for 'scatterternary.marker.color' (array ok)."""

    def __init__(
        self,
        plotly_name='color',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('array_ok', True)
        kwargs.setdefault('edit_type', 'style')
        kwargs.setdefault('role', 'style')
        # Numeric color arrays are mapped through the sibling colorscale.
        kwargs.setdefault(
            'colorscale_path', 'scatterternary.marker.colorscale'
        )
        super(ColorValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CminValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.cmin'."""

    def __init__(
        self,
        plotly_name='cmin',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'plot')
        # Setting an explicit cmin implies cauto is off.
        kwargs.setdefault('implied_edits', {'cauto': False})
        kwargs.setdefault('role', 'info')
        super(CminValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CmidValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.cmid'."""

    def __init__(
        self,
        plotly_name='cmid',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('implied_edits', {})
        kwargs.setdefault('role', 'info')
        super(CmidValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CmaxValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.cmax'."""

    def __init__(
        self,
        plotly_name='cmax',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'plot')
        # Setting an explicit cmax implies cauto is off.
        kwargs.setdefault('implied_edits', {'cauto': False})
        kwargs.setdefault('role', 'info')
        super(CmaxValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CautoValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for 'scatterternary.marker.cauto'."""

    def __init__(
        self,
        plotly_name='cauto',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('implied_edits', {})
        kwargs.setdefault('role', 'info')
        super(CautoValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class AutocolorscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for 'scatterternary.marker.autocolorscale'."""

    def __init__(
        self,
        plotly_name='autocolorscale',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('implied_edits', {})
        kwargs.setdefault('role', 'style')
        super(AutocolorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
| 37.149606 | 79 | 0.548689 |
import _plotly_utils.basevalidators
class SymbolsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Source-reference validator for 'scatterternary.marker.symbolsrc'."""

    def __init__(
        self,
        plotly_name='symbolsrc',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(SymbolsrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class SymbolValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Enumerated validator for 'scatterternary.marker.symbol'.

    The `values` list interleaves numeric codes and symbol names; per the
    visible pattern, a base code plus 100/200/300 (or the '-open', '-dot',
    '-open-dot' name suffix) selects the corresponding symbol variant.
    """

    def __init__(
        self,
        plotly_name='symbol',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        super(SymbolValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=kwargs.pop('array_ok', True),
            edit_type=kwargs.pop('edit_type', 'style'),
            role=kwargs.pop('role', 'style'),
            values=kwargs.pop(
                'values', [
                    0, 'circle', 100, 'circle-open', 200, 'circle-dot', 300,
                    'circle-open-dot', 1, 'square', 101, 'square-open', 201,
                    'square-dot', 301, 'square-open-dot', 2, 'diamond', 102,
                    'diamond-open', 202, 'diamond-dot', 302,
                    'diamond-open-dot', 3, 'cross', 103, 'cross-open', 203,
                    'cross-dot', 303, 'cross-open-dot', 4, 'x', 104, 'x-open',
                    204, 'x-dot', 304, 'x-open-dot', 5, 'triangle-up', 105,
                    'triangle-up-open', 205, 'triangle-up-dot', 305,
                    'triangle-up-open-dot', 6, 'triangle-down', 106,
                    'triangle-down-open', 206, 'triangle-down-dot', 306,
                    'triangle-down-open-dot', 7, 'triangle-left', 107,
                    'triangle-left-open', 207, 'triangle-left-dot', 307,
                    'triangle-left-open-dot', 8, 'triangle-right', 108,
                    'triangle-right-open', 208, 'triangle-right-dot', 308,
                    'triangle-right-open-dot', 9, 'triangle-ne', 109,
                    'triangle-ne-open', 209, 'triangle-ne-dot', 309,
                    'triangle-ne-open-dot', 10, 'triangle-se', 110,
                    'triangle-se-open', 210, 'triangle-se-dot', 310,
                    'triangle-se-open-dot', 11, 'triangle-sw', 111,
                    'triangle-sw-open', 211, 'triangle-sw-dot', 311,
                    'triangle-sw-open-dot', 12, 'triangle-nw', 112,
                    'triangle-nw-open', 212, 'triangle-nw-dot', 312,
                    'triangle-nw-open-dot', 13, 'pentagon', 113,
                    'pentagon-open', 213, 'pentagon-dot', 313,
                    'pentagon-open-dot', 14, 'hexagon', 114, 'hexagon-open',
                    214, 'hexagon-dot', 314, 'hexagon-open-dot', 15,
                    'hexagon2', 115, 'hexagon2-open', 215, 'hexagon2-dot', 315,
                    'hexagon2-open-dot', 16, 'octagon', 116, 'octagon-open',
                    216, 'octagon-dot', 316, 'octagon-open-dot', 17, 'star',
                    117, 'star-open', 217, 'star-dot', 317, 'star-open-dot',
                    18, 'hexagram', 118, 'hexagram-open', 218, 'hexagram-dot',
                    318, 'hexagram-open-dot', 19, 'star-triangle-up', 119,
                    'star-triangle-up-open', 219, 'star-triangle-up-dot', 319,
                    'star-triangle-up-open-dot', 20, 'star-triangle-down', 120,
                    'star-triangle-down-open', 220, 'star-triangle-down-dot',
                    320, 'star-triangle-down-open-dot', 21, 'star-square', 121,
                    'star-square-open', 221, 'star-square-dot', 321,
                    'star-square-open-dot', 22, 'star-diamond', 122,
                    'star-diamond-open', 222, 'star-diamond-dot', 322,
                    'star-diamond-open-dot', 23, 'diamond-tall', 123,
                    'diamond-tall-open', 223, 'diamond-tall-dot', 323,
                    'diamond-tall-open-dot', 24, 'diamond-wide', 124,
                    'diamond-wide-open', 224, 'diamond-wide-dot', 324,
                    'diamond-wide-open-dot', 25, 'hourglass', 125,
                    'hourglass-open', 26, 'bowtie', 126, 'bowtie-open', 27,
                    'circle-cross', 127, 'circle-cross-open', 28, 'circle-x',
                    128, 'circle-x-open', 29, 'square-cross', 129,
                    'square-cross-open', 30, 'square-x', 130, 'square-x-open',
                    31, 'diamond-cross', 131, 'diamond-cross-open', 32,
                    'diamond-x', 132, 'diamond-x-open', 33, 'cross-thin', 133,
                    'cross-thin-open', 34, 'x-thin', 134, 'x-thin-open', 35,
                    'asterisk', 135, 'asterisk-open', 36, 'hash', 136,
                    'hash-open', 236, 'hash-dot', 336, 'hash-open-dot', 37,
                    'y-up', 137, 'y-up-open', 38, 'y-down', 138, 'y-down-open',
                    39, 'y-left', 139, 'y-left-open', 40, 'y-right', 140,
                    'y-right-open', 41, 'line-ew', 141, 'line-ew-open', 42,
                    'line-ns', 142, 'line-ns-open', 43, 'line-ne', 143,
                    'line-ne-open', 44, 'line-nw', 144, 'line-nw-open'
                ]
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class SizesrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Source-reference validator for 'scatterternary.marker.sizesrc'."""

    def __init__(
        self,
        plotly_name='sizesrc',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(SizesrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class SizerefValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.sizeref'."""

    def __init__(
        self,
        plotly_name='sizeref',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('role', 'style')
        super(SizerefValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class SizemodeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Enumerated validator for 'scatterternary.marker.sizemode'."""

    def __init__(
        self,
        plotly_name='sizemode',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('role', 'info')
        # Marker size values scale either point diameter or point area.
        kwargs.setdefault('values', ['diameter', 'area'])
        super(SizemodeValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class SizeminValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.sizemin' (>= 0)."""

    def __init__(
        self,
        plotly_name='sizemin',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('min', 0)
        kwargs.setdefault('role', 'style')
        super(SizeminValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.size' (>= 0, array ok)."""

    def __init__(
        self,
        plotly_name='size',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        for option, default in (
            ('anim', True),
            ('array_ok', True),
            ('edit_type', 'calc'),
            ('min', 0),
            ('role', 'style'),
        ):
            kwargs.setdefault(option, default)
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ShowscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for 'scatterternary.marker.showscale'."""

    def __init__(
        self,
        plotly_name='showscale',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('role', 'info')
        super(ShowscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for 'scatterternary.marker.reversescale'."""

    def __init__(
        self,
        plotly_name='reversescale',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'plot')
        kwargs.setdefault('role', 'style')
        super(ReversescaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class OpacitysrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Source-reference validator for 'scatterternary.marker.opacitysrc'."""

    def __init__(
        self,
        plotly_name='opacitysrc',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(OpacitysrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.opacity' (0..1, array ok)."""

    def __init__(
        self,
        plotly_name='opacity',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        for option, default in (
            ('anim', True),
            ('array_ok', True),
            ('edit_type', 'style'),
            ('max', 1),
            ('min', 0),
            ('role', 'style'),
        ):
            kwargs.setdefault(option, default)
        super(OpacityValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class MaxdisplayedValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for 'scatterternary.marker.maxdisplayed' (>= 0)."""

    def __init__(
        self,
        plotly_name='maxdisplayed',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'plot')
        kwargs.setdefault('min', 0)
        kwargs.setdefault('role', 'style')
        super(MaxdisplayedValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for 'scatterternary.marker.line'.

    Builds the nested Line object; the generated `data_docs` string
    below describes each child property for user-facing help text.
    """

    def __init__(
        self,
        plotly_name='line',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        super(LineValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Name of the graph_objs data class this validator constructs.
            data_class_str=kwargs.pop('data_class_str', 'Line'),
            data_docs=kwargs.pop(
                'data_docs', """
            autocolorscale
                Determines whether the colorscale is a default
                palette (`autocolorscale: true`) or the palette
                determined by `marker.line.colorscale`. Has an
                effect only if in `marker.line.color`is set to
                a numerical array. In case `colorscale` is
                unspecified or `autocolorscale` is true, the
                default palette will be chosen according to
                whether numbers in the `color` array are all
                positive, all negative or mixed.
            cauto
                Determines whether or not the color domain is
                computed with respect to the input data (here
                in `marker.line.color`) or the bounds set in
                `marker.line.cmin` and `marker.line.cmax` Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Defaults to `false` when
                `marker.line.cmin` and `marker.line.cmax` are
                set by the user.
            cmax
                Sets the upper bound of the color domain. Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Value should have the
                same units as in `marker.line.color` and if
                set, `marker.line.cmin` must be set as well.
            cmid
                Sets the mid-point of the color domain by
                scaling `marker.line.cmin` and/or
                `marker.line.cmax` to be equidistant to this
                point. Has an effect only if in
                `marker.line.color`is set to a numerical array.
                Value should have the same units as in
                `marker.line.color`. Has no effect when
                `marker.line.cauto` is `false`.
            cmin
                Sets the lower bound of the color domain. Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Value should have the
                same units as in `marker.line.color` and if
                set, `marker.line.cmax` must be set as well.
            color
                Sets themarker.linecolor. It accepts either a
                specific color or an array of numbers that are
                mapped to the colorscale relative to the max
                and min values of the array or relative to
                `marker.line.cmin` and `marker.line.cmax` if
                set.
            coloraxis
                Sets a reference to a shared color axis.
                References to these shared color axes are
                "coloraxis", "coloraxis2", "coloraxis3", etc.
                Settings for these shared color axes are set in
                the layout, under `layout.coloraxis`,
                `layout.coloraxis2`, etc. Note that multiple
                color scales can be linked to the same color
                axis.
            colorscale
                Sets the colorscale. Has an effect only if in
                `marker.line.color`is set to a numerical array.
                The colorscale must be an array containing
                arrays mapping a normalized value to an rgb,
                rgba, hex, hsl, hsv, or named color string. At
                minimum, a mapping for the lowest (0) and
                highest (1) values are required. For example,
                `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To
                control the bounds of the colorscale in color
                space, use`marker.line.cmin` and
                `marker.line.cmax`. Alternatively, `colorscale`
                may be a palette name string of the following
                list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R
                eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black
                body,Earth,Electric,Viridis,Cividis.
            colorsrc
                Sets the source reference on plot.ly for color
                .
            reversescale
                Reverses the color mapping if true. Has an
                effect only if in `marker.line.color`is set to
                a numerical array. If true, `marker.line.cmin`
                will correspond to the last color in the array
                and `marker.line.cmax` will correspond to the
                first color.
            width
                Sets the width (in px) of the lines bounding
                the marker points.
            widthsrc
                Sets the source reference on plot.ly for width
                .
            """
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class GradientValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for 'scatterternary.marker.gradient'.

    Builds the nested Gradient object; the generated `data_docs`
    string below describes each child property for user-facing help.
    """

    def __init__(
        self,
        plotly_name='gradient',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        super(GradientValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Name of the graph_objs data class this validator constructs.
            data_class_str=kwargs.pop('data_class_str', 'Gradient'),
            data_docs=kwargs.pop(
                'data_docs', """
            color
                Sets the final color of the gradient fill: the
                center color for radial, the right for
                horizontal, or the bottom for vertical.
            colorsrc
                Sets the source reference on plot.ly for color
                .
            type
                Sets the type of gradient used to fill the
                markers
            typesrc
                Sets the source reference on plot.ly for type
                .
            """
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Source-reference validator for 'scatterternary.marker.colorsrc'."""

    def __init__(
        self,
        plotly_name='colorsrc',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(ColorsrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
    """Colorscale validator for 'scatterternary.marker.colorscale'."""

    def __init__(
        self,
        plotly_name='colorscale',
        parent_name='scatterternary.marker',
        **kwargs
    ):
        # Generated defaults; explicit caller-supplied values win.
        kwargs.setdefault('edit_type', 'calc')
        # Setting an explicit colorscale implies autocolorscale is off.
        kwargs.setdefault('implied_edits', {'autocolorscale': False})
        kwargs.setdefault('role', 'style')
        super(ColorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorBarValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self,
plotly_name='colorbar',
parent_name='scatterternary.marker',
**kwargs
):
super(ColorBarValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop('data_class_str', 'ColorBar'),
data_docs=kwargs.pop(
'data_docs', """
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in *pixels. Use `len` to set the value.
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see: h
ttps://github.com/d3/d3-format/blob/master/READ
ME.md#locale_format And for dates see:
https://github.com/d3/d3-time-
format/blob/master/README.md#locale_format We
add one item to d3's date formatter: "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
plotly.graph_objs.scatterternary.marker.colorba
r.Tickformatstop instance or dict with
compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.scatterternary.marker.colorbar.tickformatstop
defaults), sets the default property values to
use for elements of
scatterternary.marker.colorbar.tickformatstops
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on plot.ly for
ticktext .
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on plot.ly for
tickvals .
tickwidth
Sets the tick width (in px).
title
plotly.graph_objs.scatterternary.marker.colorba
r.Title instance or dict with compatible
properties
titlefont
Deprecated: Please use
scatterternary.marker.colorbar.title.font
instead. Sets this color bar's title font. Note
that the title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
scatterternary.marker.colorbar.title.side
instead. Determines the location of color bar's
title with respect to the color bar. Note that
the title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
Sets this color bar's vertical position anchor
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
"""
),
**kwargs
)
import _plotly_utils.basevalidators
class ColoraxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
    """Subplot-id validator for the ``scatterternary.marker.coloraxis`` property."""
    def __init__(
        self, plotly_name='coloraxis', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            dflt=kwargs.pop('dflt', None),
            edit_type=kwargs.pop('edit_type', 'calc'),
            regex=kwargs.pop('regex', '/^coloraxis([2-9]|[1-9][0-9]+)?$/'),
            role=kwargs.pop('role', 'info'),
        )
        settings.update(kwargs)
        super(ColoraxisValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Color validator for the ``scatterternary.marker.color`` property."""
    def __init__(
        self, plotly_name='color', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            array_ok=kwargs.pop('array_ok', True),
            edit_type=kwargs.pop('edit_type', 'style'),
            role=kwargs.pop('role', 'style'),
            colorscale_path=kwargs.pop(
                'colorscale_path', 'scatterternary.marker.colorscale'
            ),
        )
        settings.update(kwargs)
        super(ColorValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
import _plotly_utils.basevalidators
class CminValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``scatterternary.marker.cmin`` property."""
    def __init__(
        self, plotly_name='cmin', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            edit_type=kwargs.pop('edit_type', 'plot'),
            implied_edits=kwargs.pop('implied_edits', {'cauto': False}),
            role=kwargs.pop('role', 'info'),
        )
        settings.update(kwargs)
        super(CminValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
import _plotly_utils.basevalidators
class CmidValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``scatterternary.marker.cmid`` property."""
    def __init__(
        self, plotly_name='cmid', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            edit_type=kwargs.pop('edit_type', 'calc'),
            implied_edits=kwargs.pop('implied_edits', {}),
            role=kwargs.pop('role', 'info'),
        )
        settings.update(kwargs)
        super(CmidValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
import _plotly_utils.basevalidators
class CmaxValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``scatterternary.marker.cmax`` property."""
    def __init__(
        self, plotly_name='cmax', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            edit_type=kwargs.pop('edit_type', 'plot'),
            implied_edits=kwargs.pop('implied_edits', {'cauto': False}),
            role=kwargs.pop('role', 'info'),
        )
        settings.update(kwargs)
        super(CmaxValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
import _plotly_utils.basevalidators
class CautoValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for the ``scatterternary.marker.cauto`` property."""
    def __init__(
        self, plotly_name='cauto', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            edit_type=kwargs.pop('edit_type', 'calc'),
            implied_edits=kwargs.pop('implied_edits', {}),
            role=kwargs.pop('role', 'info'),
        )
        settings.update(kwargs)
        super(CautoValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
import _plotly_utils.basevalidators
class AutocolorscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for the ``scatterternary.marker.autocolorscale`` property."""
    def __init__(
        self, plotly_name='autocolorscale', parent_name='scatterternary.marker', **kwargs
    ):
        # Defaults below apply only when the caller did not override them.
        settings = dict(
            edit_type=kwargs.pop('edit_type', 'calc'),
            implied_edits=kwargs.pop('implied_edits', {}),
            role=kwargs.pop('role', 'style'),
        )
        settings.update(kwargs)
        super(AutocolorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **settings
        )
| true | true |
f7fe017c83c501bcc0aa1472abfbdea6955896c5 | 208 | py | Python | Kattis (Tae Ho Kim)/Triple Texting/Answer.py | kim4t/Kattis | 67a0aef92d65f5f9294e9a3338ef68e38e697c1c | [
"Unlicense"
] | null | null | null | Kattis (Tae Ho Kim)/Triple Texting/Answer.py | kim4t/Kattis | 67a0aef92d65f5f9294e9a3338ef68e38e697c1c | [
"Unlicense"
] | null | null | null | Kattis (Tae Ho Kim)/Triple Texting/Answer.py | kim4t/Kattis | 67a0aef92d65f5f9294e9a3338ef68e38e697c1c | [
"Unlicense"
] | null | null | null | s = input()
# Kattis "Triple Texting": the same message was typed three times in a row
# and at most one of the three copies differs.  If the first two copies
# agree they must be the clean message; otherwise the third copy is.
# Fixes: no longer shadows the builtin `list`, and uses slicing instead of
# a char-by-char append loop.
s = input()
third = len(s) // 3
w1 = s[:third]
w2 = s[third:2 * third]
w3 = s[2 * third:]
if w1 == w2:
    print(w1)
else:
    print(w3)
# Kattis "Triple Texting": the same message was typed three times in a row
# and at most one of the three copies differs.  If the first two copies
# agree they must be the clean message; otherwise the third copy is.
# Fixes: no longer shadows the builtin `list`, and uses slicing instead of
# a char-by-char append loop.
s = input()
third = len(s) // 3
w1 = s[:third]
w2 = s[third:2 * third]
w3 = s[2 * third:]
if w1 == w2:
    print(w1)
else:
    print(w3)
f7fe019bf105ac77a2febfa8d277d5397ef387fd | 3,078 | py | Python | artsubj-from-npimport.py | rwst/wikidata-molbio | 198587fda16f81cf241c398650f79594c07cbdee | [
"CC0-1.0"
] | 2 | 2021-05-16T09:42:59.000Z | 2022-03-14T11:17:15.000Z | artsubj-from-npimport.py | rwst/wikidata-molbio | 198587fda16f81cf241c398650f79594c07cbdee | [
"CC0-1.0"
] | null | null | null | artsubj-from-npimport.py | rwst/wikidata-molbio | 198587fda16f81cf241c398650f79594c07cbdee | [
"CC0-1.0"
] | 1 | 2021-05-16T09:54:14.000Z | 2021-05-16T09:54:14.000Z |
import os, json, argparse, sys, datetime, time, csv

# Emit Wikidata "main subject" (P921) edits: for every reference supporting a
# compound-found-in-taxon statement, list the taxa and compounds it supports
# that are not already among that reference's main subjects.

parser = argparse.ArgumentParser()
parser.add_argument("-q", "--query", help="perform SPARQL query",
                    action="store_true")
args = parser.parse_args()

# Query results are cached in <script>.json; the slow SPARQL query is only
# re-run when -q/--query is given.
script = os.path.basename(sys.argv[0])[:-3]
if args.query:
    print('performing query...')
    ret = os.popen('wd sparql {}.rq >{}.json'.format(script, script))
    # os.popen(...).close() returns a non-None exit status on failure.
    if ret.close() is not None:
        raise RuntimeError('wd sparql query failed')

with open('{}.json'.format(script)) as fh:
    jol = json.load(fh)

taxa = {}   # compound -> taxon -> set of supporting references
subjs = {}  # reference -> set of its current main subjects
for d in jol:
    comp = d.get('comp')
    taxon = d.get('taxon')
    ref = d.get('ref')
    subj = d.get('ingr')
    taxa.setdefault(comp, {}).setdefault(taxon, set()).add(ref)
    subjs.setdefault(ref, set()).add(subj)
print("number of references used: {}".format(len(subjs.keys())))

ntsubjs = {}  # reference -> taxa missing from its main subjects
ncsubjs = {}  # reference -> compounds missing from its main subjects
for comp, by_taxon in taxa.items():
    for taxon, refset in by_taxon.items():
        for ref in refset:
            existing = subjs.get(ref)
            if existing is None or taxon not in existing:
                ntsubjs.setdefault(ref, set()).add(taxon)
            if existing is None or comp not in existing:
                ncsubjs.setdefault(ref, set()).add(comp)
print("number of references with new taxon subject: {}".format(len(ntsubjs.keys())))
print("number of references with new compound subject: {}".format(len(ncsubjs.keys())))
allrefs = set(ntsubjs.keys()).union(set(ncsubjs.keys()))
print("number of references to change: {}".format(len(allrefs)))
print("don't change: {}".format(set(subjs.keys()).difference(allrefs)))

# One JSON edit object per reference; each new P921 value carries a reference
# qualifier P248 pointing at item Q104225190.
maxtclaims = 0
maxcclaims = 0
for ref in allrefs:
    claims = []
    if ref in ntsubjs:
        for taxon in ntsubjs[ref]:
            claims.append({"value": taxon, "references": {"P248": "Q104225190"}})
        maxtclaims = max(maxtclaims, len(ntsubjs[ref]))
    if ref in ncsubjs:
        for comp in ncsubjs[ref]:
            claims.append({"value": comp, "references": {"P248": "Q104225190"}})
        maxcclaims = max(maxcclaims, len(ncsubjs[ref]))
    j = {"id": ref, "claims": {"P921": claims}}
    print(json.dumps(j), flush=True)
print("max taxa added per reference: {}".format(maxtclaims))
print("max compounds added per reference: {}".format(maxcclaims))
| 30.176471 | 87 | 0.561079 |
import os, json, argparse, sys, datetime, time, csv

# Emit Wikidata "main subject" (P921) edits: for every reference supporting a
# compound-found-in-taxon statement, list the taxa and compounds it supports
# that are not already among that reference's main subjects.

parser = argparse.ArgumentParser()
parser.add_argument("-q", "--query", help="perform SPARQL query",
                    action="store_true")
args = parser.parse_args()

# Query results are cached in <script>.json; the slow SPARQL query is only
# re-run when -q/--query is given.
script = os.path.basename(sys.argv[0])[:-3]
if args.query:
    print('performing query...')
    ret = os.popen('wd sparql {}.rq >{}.json'.format(script, script))
    # os.popen(...).close() returns a non-None exit status on failure.
    if ret.close() is not None:
        raise RuntimeError('wd sparql query failed')

with open('{}.json'.format(script)) as fh:
    jol = json.load(fh)

taxa = {}   # compound -> taxon -> set of supporting references
subjs = {}  # reference -> set of its current main subjects
for d in jol:
    comp = d.get('comp')
    taxon = d.get('taxon')
    ref = d.get('ref')
    subj = d.get('ingr')
    taxa.setdefault(comp, {}).setdefault(taxon, set()).add(ref)
    subjs.setdefault(ref, set()).add(subj)
print("number of references used: {}".format(len(subjs.keys())))

ntsubjs = {}  # reference -> taxa missing from its main subjects
ncsubjs = {}  # reference -> compounds missing from its main subjects
for comp, by_taxon in taxa.items():
    for taxon, refset in by_taxon.items():
        for ref in refset:
            existing = subjs.get(ref)
            if existing is None or taxon not in existing:
                ntsubjs.setdefault(ref, set()).add(taxon)
            if existing is None or comp not in existing:
                ncsubjs.setdefault(ref, set()).add(comp)
print("number of references with new taxon subject: {}".format(len(ntsubjs.keys())))
print("number of references with new compound subject: {}".format(len(ncsubjs.keys())))
allrefs = set(ntsubjs.keys()).union(set(ncsubjs.keys()))
print("number of references to change: {}".format(len(allrefs)))
print("don't change: {}".format(set(subjs.keys()).difference(allrefs)))

# One JSON edit object per reference; each new P921 value carries a reference
# qualifier P248 pointing at item Q104225190.
maxtclaims = 0
maxcclaims = 0
for ref in allrefs:
    claims = []
    if ref in ntsubjs:
        for taxon in ntsubjs[ref]:
            claims.append({"value": taxon, "references": {"P248": "Q104225190"}})
        maxtclaims = max(maxtclaims, len(ntsubjs[ref]))
    if ref in ncsubjs:
        for comp in ncsubjs[ref]:
            claims.append({"value": comp, "references": {"P248": "Q104225190"}})
        maxcclaims = max(maxcclaims, len(ncsubjs[ref]))
    j = {"id": ref, "claims": {"P921": claims}}
    print(json.dumps(j), flush=True)
print("max taxa added per reference: {}".format(maxtclaims))
print("max compounds added per reference: {}".format(maxcclaims))
| true | true |
f7fe0265d9ab1b2a5fbea6dd4235ff9c7fa6cbe2 | 217 | py | Python | webempresa/services/admin.py | JulioAlbertoTum/web-emp-django2 | f4ee48885f5f0166d3620c27569f7cbcaf997561 | [
"MIT"
] | null | null | null | webempresa/services/admin.py | JulioAlbertoTum/web-emp-django2 | f4ee48885f5f0166d3620c27569f7cbcaf997561 | [
"MIT"
] | 7 | 2020-06-05T22:07:06.000Z | 2022-03-11T23:54:48.000Z | webempresa/services/admin.py | JulioAlbertoTum/web-emp-django2 | f4ee48885f5f0166d3620c27569f7cbcaf997561 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Service
# Register your models here.
class ServiceAdmin(admin.ModelAdmin):
    """Admin options for Service: the timestamp fields are shown read-only."""
    readonly_fields = ('created', 'updated')
admin.site.register(Service, ServiceAdmin) | 27.125 | 44 | 0.78341 | from django.contrib import admin
from .models import Service
class ServiceAdmin(admin.ModelAdmin):
    """Admin options for Service: the timestamp fields are shown read-only."""
    readonly_fields = ('created', 'updated')
admin.site.register(Service, ServiceAdmin) | true | true |
f7fe03cb2684776bd3e75f9a0d36782abe353d52 | 2,716 | py | Python | rbw/utils/ffmpeg.py | CNCLgithub/rigid_body_world | e3bf8bf062d38a3fb00c8b9f36a866593769e9cb | [
"MIT"
] | null | null | null | rbw/utils/ffmpeg.py | CNCLgithub/rigid_body_world | e3bf8bf062d38a3fb00c8b9f36a866593769e9cb | [
"MIT"
] | null | null | null | rbw/utils/ffmpeg.py | CNCLgithub/rigid_body_world | e3bf8bf062d38a3fb00c8b9f36a866593769e9cb | [
"MIT"
] | 1 | 2021-10-02T18:09:51.000Z | 2021-10-02T18:09:51.000Z | import os
import shlex
import subprocess
from pprint import pprint
def continous(source, out, start, vframes, fps):
    """Build the ffmpeg command that encodes an image sequence to H.264.

    ``continous`` (sic) is kept as the public name because callers use it.
    """
    base = ('ffmpeg -y -start_number {0:d} -framerate {2:d} -i {1!s} -hide_banner -crf 5 '+ \
        '-preset slow -c:v libx264 -pix_fmt yuv420p')
    pieces = [base.format(start, source, fps)]
    if vframes is not None:
        pieces.append(' -vframes {0:d}'.format(vframes))
    pieces.append(' ' + out)
    return ''.join(pieces)
def extend(source, out, dur, fps):
    """Build an ffmpeg command that holds the last frame for ``dur`` seconds.

    ``fps`` is accepted for signature compatibility but is not used here.
    """
    template = ('ffmpeg -y -i {0!s} -vf tpad=stop_mode=clone:' + \
        'stop_duration={1:f} {2!s}')
    return template.format(source, dur, out)
def concat(b, out, a, reverse = False):
    """Build a bash command joining two files via ffmpeg's concat demuxer.

    The file list comes from a bash process substitution, so the command
    must be executed with bash (see ``run_cmd``).  With ``reverse`` the
    order of the two inputs is swapped.
    """
    template = 'ffmpeg -y -f concat -safe 0 -i ' +\
        '<(for f in \'{0!s}\' \'{1!s}\'; do echo \"file \'$f\'\"; done) ' + \
        '-c copy {2!s}'
    first, second = (b, a) if reverse else (a, b)
    return template.format(first, second, out)
def blank(src, out, dur, fps):
    """Build an ffmpeg command producing ``dur`` seconds of white 600x400 video.

    ``src`` is accepted for signature compatibility with the other builders
    used by ``chain`` but is not used.
    """
    template = 'ffmpeg -y -f lavfi -r {0:d} -i color=white:600x400:d={1:f} -pix_fmt yuv420p {2!s}'
    return template.format(fps, dur, out)
def chain(cmds, args, source, out, suffix):
    """Thread intermediate outputs through a pipeline of command builders.

    Each builder in ``cmds`` is called as ``builder(prev_output, target,
    *extra)``; the last intermediate file is renamed to ``out``.mp4 and the
    remaining intermediates are removed.
    """
    intermediate = out + '_' + suffix + '{0:d}.mp4'
    current = source
    commands = []
    produced = []
    for index, (builder, extra) in enumerate(zip(cmds, args)):
        target = intermediate.format(index)
        commands.append(builder(current, target, *extra))
        produced.append(target)
        current = target
    commands.append('mv {0!s} {1!s}.mp4'.format(produced[-1], out))
    commands.append(('rm ' + ' '.join(produced[:-1])))
    return commands
def pause(source, vframes, fps, loc, dur, out):
    """Build the commands rendering ``source`` with a ``dur``-second freeze.

    Part ``a`` encodes the tail starting at frame ``loc`` with its first
    frame held; part ``b`` encodes the head up to ``loc`` and concatenates
    the held tail onto it, then the temporary file is removed.
    """
    held = out + '_p1'
    commands = chain([continous, extend],
                     [(1, loc, fps), (dur, fps)],
                     source, held, 'a')
    commands += chain([continous, concat],
                      [(loc, vframes, fps), (held + '.mp4', False)],
                      source, out, 'b')
    commands.append('rm ' + held + '.mp4')
    return commands
def stimuli(a, b, fps, dur, out):
    """Build the commands composing a ``dur``-second white lead-in with
    clips ``a`` and ``b`` via the ``chain`` pipeline."""
    here = os.path.dirname(__file__)
    white = os.path.join(here, 'white_600_400.png')
    return chain([blank, concat, concat],
                 [(dur, fps), (a, False), (b, True)],
                 white, out, 'e')
def run_cmd(cmds):
    """Echo and execute each shell command in sequence.

    Runs under bash (required for the process substitutions built by
    ``concat``); failures are not raised (``check=False``).
    """
    for command in cmds:
        print(command)
        subprocess.run(command, check=False, shell=True, executable='/bin/bash')
def continuous_movie(source, out, fps = 60, vframes = None):
    """Encode the ``source`` frame sequence straight to ``out``.mp4."""
    run_cmd([continous(source, out + '.mp4', 0, vframes, fps)])
def paused_movie(source, out, fps = 60, loc = 20, dur = 0.5):
    """Encode ``source`` with a ``dur``-second pause at frame ``loc``."""
    run_cmd(pause(source, None, fps, loc, dur, out))
def stimuli_movie(a, b, out, fps = 60, dur = 0.5):
    """Encode the white lead-in + ``a`` + ``b`` composition to ``out``."""
    run_cmd(stimuli(a, b, fps, dur, out))
| 31.581395 | 93 | 0.554492 | import os
import shlex
import subprocess
from pprint import pprint
# NOTE(review): this region is a comment-stripped duplicate of the ffmpeg
# helper functions earlier in this dump; the definitions are identical.
# Builds the base ffmpeg encode command for an image sequence; "continous"
# (sic) is kept because callers use this name.
def continous(source, out, start, vframes, fps):
    cmd = ('ffmpeg -y -start_number {0:d} -framerate {2:d} -i {1!s} -hide_banner -crf 5 '+ \
        '-preset slow -c:v libx264 -pix_fmt yuv420p')
    cmd = cmd.format(start, source, fps)
    if not vframes is None:
        cmd += ' -vframes {0:d}'.format(vframes)
    cmd += ' ' + out
    return cmd
# Builds a command that holds the last frame of `source` for `dur` seconds.
def extend(source, out, dur, fps):
    cmd = ('ffmpeg -y -i {0!s} -vf tpad=stop_mode=clone:' + \
        'stop_duration={1:f} {2!s}').format(source, dur, out)
    return cmd
# Builds a bash command joining two files via ffmpeg's concat demuxer; the
# process substitution requires bash (see run_cmd).
def concat(b, out, a, reverse = False):
    cmd = 'ffmpeg -y -f concat -safe 0 -i ' +\
        '<(for f in \'{0!s}\' \'{1!s}\'; do echo \"file \'$f\'\"; done) ' + \
        '-c copy {2!s}'
    if reverse:
        cmd = cmd.format(b, a, out)
    else:
        cmd = cmd.format(a, b, out)
    return cmd
# Builds a command producing `dur` seconds of white 600x400 video; `src`
# is unused but kept for builder-signature compatibility with chain().
def blank(src, out, dur, fps):
    cmd = 'ffmpeg -y -f lavfi -r {0:d} -i color=white:600x400:d={1:f} -pix_fmt yuv420p {2!s}'
    cmd = cmd.format(fps, dur, out)
    return cmd
# Threads intermediate outputs through a pipeline of command builders; the
# final intermediate is renamed to `out`.mp4 and the rest removed.
def chain(cmds, args, source, out, suffix):
    out_p = out + '_' + suffix + '{0:d}.mp4'
    src = source
    cmd_chain = []
    to_remove = []
    for i,(cmd,arg) in enumerate(zip(cmds, args)):
        out_i = out_p.format(i)
        cmd_chain.append(cmd(src, out_i, *arg))
        to_remove.append(out_i)
        src = out_i
    cmd_chain.append('mv {0!s} {1!s}.mp4'.format(to_remove[-1], out))
    cmd_chain.append(('rm ' + ' '.join(to_remove[:-1])))
    return cmd_chain
# Builds commands rendering `source` with a `dur`-second freeze at frame `loc`.
def pause(source, vframes, fps, loc, dur, out):
    out1 = out + '_p1'
    cmd = chain([continous, extend],
                [(1, loc, fps), (dur, fps)],
                source, out1, 'a')
    cmd += chain([continous, concat],
                 [(loc, vframes, fps), (out1+'.mp4', False)],
                 source, out, 'b')
    cmd.append('rm ' + out1+'.mp4')
    return cmd
# Builds commands composing a white lead-in with clips `a` and `b`.
def stimuli(a, b, fps, dur, out):
    d = os.path.dirname(__file__)
    src = os.path.join(d, 'white_600_400.png')
    cmds = chain([blank, concat, concat],
                 [(dur, fps), (a, False), (b, True)],
                 src, out, 'e')
    return cmds
# Echoes and executes each command under bash without raising on failure.
def run_cmd(cmds):
    for cmd in cmds:
        print(cmd)
        subprocess.run(cmd, check=False, shell = True, executable='/bin/bash' )
# Convenience wrappers that build a command list and run it immediately.
def continuous_movie(source, out, fps = 60, vframes = None):
    cmd = continous(source, out + '.mp4', 0, vframes, fps)
    run_cmd([cmd])
def paused_movie(source, out, fps = 60, loc = 20, dur = 0.5):
    cmd = pause(source, None, fps, loc, dur, out)
    run_cmd(cmd)
def stimuli_movie(a, b, out, fps = 60, dur = 0.5):
    cmds = stimuli(a, b, fps, dur, out)
    run_cmd(cmds)
| true | true |
f7fe061ae5402f8739f69ff13a598a9c6934f9f7 | 375 | py | Python | Appointment/migrations/0004_auto_20210304_2051.py | CiganOliviu/InfiniteShoot | 14f7fb21e360e3c58876d82ebbe206054c72958e | [
"MIT"
] | 1 | 2021-04-02T16:45:37.000Z | 2021-04-02T16:45:37.000Z | Appointment/migrations/0004_auto_20210304_2051.py | CiganOliviu/InfiniteShoot-1 | 6322ae34f88caaffc1de29dfa4f6d86d175810a7 | [
"Apache-2.0"
] | null | null | null | Appointment/migrations/0004_auto_20210304_2051.py | CiganOliviu/InfiniteShoot-1 | 6322ae34f88caaffc1de29dfa4f6d86d175810a7 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.1.7 on 2021-03-04 18:51
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ``calendar`` field of ``Appointment`` to ``desired_date``."""
    dependencies = [
        ('Appointment', '0003_adminresponse'),
    ]
    operations = [
        migrations.RenameField(
            model_name='appointment',
            old_name='calendar',
            new_name='desired_date',
        ),
    ]
| 19.736842 | 47 | 0.597333 |
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ``calendar`` field of ``Appointment`` to ``desired_date``."""
    dependencies = [
        ('Appointment', '0003_adminresponse'),
    ]
    operations = [
        migrations.RenameField(
            model_name='appointment',
            old_name='calendar',
            new_name='desired_date',
        ),
    ]
| true | true |
f7fe09025329dea7a8f4983ed00b94e14ffabaa4 | 130 | py | Python | brute/__main__.py | noa/brute | 2c016c59b7748bd4b3a800488efdec8a45a33532 | [
"MIT"
] | 1 | 2018-11-29T22:42:04.000Z | 2018-11-29T22:42:04.000Z | brute/__main__.py | noa/brute | 2c016c59b7748bd4b3a800488efdec8a45a33532 | [
"MIT"
] | null | null | null | brute/__main__.py | noa/brute | 2c016c59b7748bd4b3a800488efdec8a45a33532 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""brute.__main__: executed when brute directory is called as script."""
from .brute import main
main()
| 18.571429 | 72 | 0.676923 |
from .brute import main
main()
| true | true |
f7fe09257557768a0c1b4fb3b1c5fea27a669b7f | 17,017 | py | Python | kuber/latest/node_v1.py | datalayer-externals/kuber | 4d577950ce7d1be2b882fbe66827dc3d7e67b350 | [
"MIT"
] | 1 | 2019-06-11T04:57:34.000Z | 2019-06-11T04:57:34.000Z | kuber/latest/node_v1.py | datalayer-externals/kuber | 4d577950ce7d1be2b882fbe66827dc3d7e67b350 | [
"MIT"
] | 1 | 2019-05-05T22:08:13.000Z | 2019-05-06T11:43:32.000Z | kuber/latest/node_v1.py | datalayer-externals/kuber | 4d577950ce7d1be2b882fbe66827dc3d7e67b350 | [
"MIT"
] | 2 | 2021-05-08T14:47:56.000Z | 2021-10-15T21:47:04.000Z | import typing # noqa: F401
from kubernetes import client # noqa: F401
from kuber import kube_api as _kube_api # noqa: F401
from kuber import definitions as _kuber_definitions # noqa: F401
from kuber import _types # noqa: F401
from kuber.latest.meta_v1 import ListMeta # noqa: F401
from kuber.latest.meta_v1 import ObjectMeta # noqa: F401
from kuber.latest.core_v1 import Toleration # noqa: F401
class Overhead(_kuber_definitions.Definition):
    """
    Overhead structure represents the resource overhead
    associated with running a pod.
    """
    def __init__(
        self,
        pod_fixed: dict = None,
    ):
        """Create Overhead instance."""
        super(Overhead, self).__init__(api_version="node/v1", kind="Overhead")
        # Raw values keyed by the Kubernetes camelCase property names.
        self._properties = {
            "podFixed": pod_fixed if pod_fixed is not None else {},
        }
        # Property-name -> (type, item-type) mapping; presumably consumed by
        # the kuber base class for (de)serialization — not shown here.
        self._types = {
            "podFixed": (dict, None),
        }
    @property
    def pod_fixed(self) -> dict:
        """
        PodFixed represents the fixed resource overhead associated
        with running a pod.
        """
        return typing.cast(
            dict,
            self._properties.get("podFixed"),
        )
    @pod_fixed.setter
    def pod_fixed(self, value: dict):
        """
        PodFixed represents the fixed resource overhead associated
        with running a pod.
        """
        self._properties["podFixed"] = value
    def __enter__(self) -> "Overhead":
        # Context-manager support: yields the definition itself.
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Never suppresses exceptions.
        return False
class RuntimeClass(_kuber_definitions.Resource):
    """
    RuntimeClass defines a class of container runtime supported
    in the cluster. The RuntimeClass is used to determine which
    container runtime is used to run all containers in a pod.
    RuntimeClasses are manually defined by a user or cluster
    provisioner, and referenced in the PodSpec. The Kubelet is
    responsible for resolving the RuntimeClassName reference
    before running the pod. For more details, see
    https://kubernetes.io/docs/concepts/containers/runtime-
    class/
    """
    def __init__(
        self,
        handler: str = None,
        metadata: "ObjectMeta" = None,
        overhead: "Overhead" = None,
        scheduling: "Scheduling" = None,
    ):
        """Create RuntimeClass instance."""
        super(RuntimeClass, self).__init__(api_version="node/v1", kind="RuntimeClass")
        # Raw values keyed by the Kubernetes camelCase property names.
        self._properties = {
            "handler": handler if handler is not None else "",
            "metadata": metadata if metadata is not None else ObjectMeta(),
            "overhead": overhead if overhead is not None else Overhead(),
            "scheduling": scheduling if scheduling is not None else Scheduling(),
        }
        self._types = {
            "apiVersion": (str, None),
            "handler": (str, None),
            "kind": (str, None),
            "metadata": (ObjectMeta, None),
            "overhead": (Overhead, None),
            "scheduling": (Scheduling, None),
        }
    @property
    def handler(self) -> str:
        """
        Handler specifies the underlying runtime and configuration
        that the CRI implementation will use to handle pods of this
        class. The possible values are specific to the node & CRI
        configuration.  It is assumed that all handlers are
        available on every node, and handlers of the same name are
        equivalent on every node. For example, a handler called
        "runc" might specify that the runc OCI runtime (using native
        Linux containers) will be used to run the containers in a
        pod. The Handler must be lowercase, conform to the DNS Label
        (RFC 1123) requirements, and is immutable.
        """
        return typing.cast(
            str,
            self._properties.get("handler"),
        )
    @handler.setter
    def handler(self, value: str):
        """
        Handler specifies the underlying runtime and configuration
        that the CRI implementation will use to handle pods of this
        class. The possible values are specific to the node & CRI
        configuration.  It is assumed that all handlers are
        available on every node, and handlers of the same name are
        equivalent on every node. For example, a handler called
        "runc" might specify that the runc OCI runtime (using native
        Linux containers) will be used to run the containers in a
        pod. The Handler must be lowercase, conform to the DNS Label
        (RFC 1123) requirements, and is immutable.
        """
        self._properties["handler"] = value
    @property
    def metadata(self) -> "ObjectMeta":
        """
        More info:
        https://git.k8s.io/community/contributors/devel/sig-
        architecture/api-conventions.md#metadata
        """
        return typing.cast(
            "ObjectMeta",
            self._properties.get("metadata"),
        )
    @metadata.setter
    def metadata(self, value: typing.Union["ObjectMeta", dict]):
        """
        More info:
        https://git.k8s.io/community/contributors/devel/sig-
        architecture/api-conventions.md#metadata
        """
        if isinstance(value, dict):
            value = typing.cast(
                ObjectMeta,
                ObjectMeta().from_dict(value),
            )
        self._properties["metadata"] = value
    @property
    def overhead(self) -> "Overhead":
        """
        Overhead represents the resource overhead associated with
        running a pod for a given RuntimeClass. For more details,
        see
         https://kubernetes.io/docs/concepts/scheduling-
        eviction/pod-overhead/
        This field is in beta starting v1.18 and is only honored by
        servers that enable the PodOverhead feature.
        """
        return typing.cast(
            "Overhead",
            self._properties.get("overhead"),
        )
    @overhead.setter
    def overhead(self, value: typing.Union["Overhead", dict]):
        """
        Overhead represents the resource overhead associated with
        running a pod for a given RuntimeClass. For more details,
        see
         https://kubernetes.io/docs/concepts/scheduling-
        eviction/pod-overhead/
        This field is in beta starting v1.18 and is only honored by
        servers that enable the PodOverhead feature.
        """
        if isinstance(value, dict):
            value = typing.cast(
                Overhead,
                Overhead().from_dict(value),
            )
        self._properties["overhead"] = value
    @property
    def scheduling(self) -> "Scheduling":
        """
        Scheduling holds the scheduling constraints to ensure that
        pods running with this RuntimeClass are scheduled to nodes
        that support it. If scheduling is nil, this RuntimeClass is
        assumed to be supported by all nodes.
        """
        return typing.cast(
            "Scheduling",
            self._properties.get("scheduling"),
        )
    @scheduling.setter
    def scheduling(self, value: typing.Union["Scheduling", dict]):
        """
        Scheduling holds the scheduling constraints to ensure that
        pods running with this RuntimeClass are scheduled to nodes
        that support it. If scheduling is nil, this RuntimeClass is
        assumed to be supported by all nodes.
        """
        if isinstance(value, dict):
            value = typing.cast(
                Scheduling,
                Scheduling().from_dict(value),
            )
        self._properties["scheduling"] = value
    def create_resource(self, namespace: "str" = None):
        """
        Creates the RuntimeClass in the currently
        configured Kubernetes cluster.
        """
        names = ["create_namespaced_runtime_class", "create_runtime_class"]
        _kube_api.execute(
            action="create",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"body": self.to_dict()},
        )
    def replace_resource(self, namespace: "str" = None):
        """
        Replaces the RuntimeClass in the currently
        configured Kubernetes cluster.
        """
        names = ["replace_namespaced_runtime_class", "replace_runtime_class"]
        _kube_api.execute(
            action="replace",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"body": self.to_dict(), "name": self.metadata.name},
        )
    def patch_resource(self, namespace: "str" = None):
        """
        Patches the RuntimeClass in the currently
        configured Kubernetes cluster.
        """
        names = ["patch_namespaced_runtime_class", "patch_runtime_class"]
        _kube_api.execute(
            action="patch",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"body": self.to_dict(), "name": self.metadata.name},
        )
    def get_resource_status(self, namespace: "str" = None):
        """This resource does not have a status."""
        pass
    def read_resource(self, namespace: str = None):
        """
        Reads the RuntimeClass from the currently configured
        Kubernetes cluster and returns the low-level definition object.
        """
        names = [
            "read_namespaced_runtime_class",
            "read_runtime_class",
        ]
        return _kube_api.execute(
            action="read",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"name": self.metadata.name},
        )
    def delete_resource(
        self,
        namespace: str = None,
        propagation_policy: str = "Foreground",
        grace_period_seconds: int = 10,
    ):
        """
        Deletes the RuntimeClass from the currently configured
        Kubernetes cluster.
        """
        names = [
            "delete_namespaced_runtime_class",
            "delete_runtime_class",
        ]
        body = client.V1DeleteOptions(
            propagation_policy=propagation_policy,
            grace_period_seconds=grace_period_seconds,
        )
        _kube_api.execute(
            action="delete",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"name": self.metadata.name, "body": body},
        )
    @staticmethod
    def get_resource_api(
        api_client: client.ApiClient = None, **kwargs
    ) -> "client.NodeV1Api":
        """
        Returns an instance of the kubernetes API client associated with
        this object.
        """
        if api_client:
            # Fix: previously passed as "apl_client" (typo), which
            # client.NodeV1Api does not accept; the kubernetes client
            # constructor keyword is "api_client".
            kwargs["api_client"] = api_client
        return client.NodeV1Api(**kwargs)
    def __enter__(self) -> "RuntimeClass":
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
class RuntimeClassList(_kuber_definitions.Collection):
    """
    RuntimeClassList is a list of RuntimeClass objects.
    """
    def __init__(
        self,
        items: typing.List["RuntimeClass"] = None,
        metadata: "ListMeta" = None,
    ):
        """Create RuntimeClassList instance."""
        super(RuntimeClassList, self).__init__(
            api_version="node/v1", kind="RuntimeClassList"
        )
        # Raw values keyed by the Kubernetes camelCase property names.
        self._properties = {
            "items": items if items is not None else [],
            "metadata": metadata if metadata is not None else ListMeta(),
        }
        self._types = {
            "apiVersion": (str, None),
            "items": (list, RuntimeClass),
            "kind": (str, None),
            "metadata": (ListMeta, None),
        }
    @property
    def items(self) -> typing.List["RuntimeClass"]:
        """
        Items is a list of schema objects.
        """
        return typing.cast(
            typing.List["RuntimeClass"],
            self._properties.get("items"),
        )
    @items.setter
    def items(
        self, value: typing.Union[typing.List["RuntimeClass"], typing.List[dict]]
    ):
        """
        Items is a list of schema objects.
        """
        # Dicts are coerced into RuntimeClass instances before storage.
        cleaned: typing.List[RuntimeClass] = []
        for item in value:
            if isinstance(item, dict):
                item = typing.cast(
                    RuntimeClass,
                    RuntimeClass().from_dict(item),
                )
            cleaned.append(typing.cast(RuntimeClass, item))
        self._properties["items"] = cleaned
    @property
    def metadata(self) -> "ListMeta":
        """
        Standard list metadata. More info:
        https://git.k8s.io/community/contributors/devel/sig-
        architecture/api-conventions.md#metadata
        """
        return typing.cast(
            "ListMeta",
            self._properties.get("metadata"),
        )
    @metadata.setter
    def metadata(self, value: typing.Union["ListMeta", dict]):
        """
        Standard list metadata. More info:
        https://git.k8s.io/community/contributors/devel/sig-
        architecture/api-conventions.md#metadata
        """
        if isinstance(value, dict):
            value = typing.cast(
                ListMeta,
                ListMeta().from_dict(value),
            )
        self._properties["metadata"] = value
    @staticmethod
    def get_resource_api(
        api_client: client.ApiClient = None, **kwargs
    ) -> "client.NodeV1Api":
        """
        Returns an instance of the kubernetes API client associated with
        this object.
        """
        if api_client:
            # Fix: previously passed as "apl_client" (typo), which
            # client.NodeV1Api does not accept; the kubernetes client
            # constructor keyword is "api_client".
            kwargs["api_client"] = api_client
        return client.NodeV1Api(**kwargs)
    def __enter__(self) -> "RuntimeClassList":
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
class Scheduling(_kuber_definitions.Definition):
    """
    Scheduling specifies the scheduling constraints for nodes
    supporting a RuntimeClass.
    """
    def __init__(
        self,
        node_selector: dict = None,
        tolerations: typing.List["Toleration"] = None,
    ):
        """Create Scheduling instance."""
        super(Scheduling, self).__init__(api_version="node/v1", kind="Scheduling")
        # Raw values keyed by the Kubernetes camelCase property names.
        self._properties = {
            "nodeSelector": node_selector if node_selector is not None else {},
            "tolerations": tolerations if tolerations is not None else [],
        }
        self._types = {
            "nodeSelector": (dict, None),
            "tolerations": (list, Toleration),
        }
    @property
    def node_selector(self) -> dict:
        """
        nodeSelector lists labels that must be present on nodes that
        support this RuntimeClass. Pods using this RuntimeClass can
        only be scheduled to a node matched by this selector. The
        RuntimeClass nodeSelector is merged with a pod's existing
        nodeSelector. Any conflicts will cause the pod to be
        rejected in admission.
        """
        return typing.cast(
            dict,
            self._properties.get("nodeSelector"),
        )
    @node_selector.setter
    def node_selector(self, value: dict):
        """
        nodeSelector lists labels that must be present on nodes that
        support this RuntimeClass. Pods using this RuntimeClass can
        only be scheduled to a node matched by this selector. The
        RuntimeClass nodeSelector is merged with a pod's existing
        nodeSelector. Any conflicts will cause the pod to be
        rejected in admission.
        """
        self._properties["nodeSelector"] = value
    @property
    def tolerations(self) -> typing.List["Toleration"]:
        """
        tolerations are appended (excluding duplicates) to pods
        running with this RuntimeClass during admission, effectively
        unioning the set of nodes tolerated by the pod and the
        RuntimeClass.
        """
        return typing.cast(
            typing.List["Toleration"],
            self._properties.get("tolerations"),
        )
    @tolerations.setter
    def tolerations(
        self, value: typing.Union[typing.List["Toleration"], typing.List[dict]]
    ):
        """
        tolerations are appended (excluding duplicates) to pods
        running with this RuntimeClass during admission, effectively
        unioning the set of nodes tolerated by the pod and the
        RuntimeClass.
        """
        # Dicts are coerced into Toleration instances before storage.
        cleaned: typing.List[Toleration] = []
        for item in value:
            if isinstance(item, dict):
                item = typing.cast(
                    Toleration,
                    Toleration().from_dict(item),
                )
            cleaned.append(typing.cast(Toleration, item))
        self._properties["tolerations"] = cleaned
    def __enter__(self) -> "Scheduling":
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Never suppresses exceptions.
        return False
| 32.725 | 86 | 0.59323 | import typing
from kubernetes import client
from kuber import kube_api as _kube_api
from kuber import definitions as _kuber_definitions
from kuber import _types
from kuber.latest.meta_v1 import ListMeta
from kuber.latest.meta_v1 import ObjectMeta
from kuber.latest.core_v1 import Toleration
class Overhead(_kuber_definitions.Definition):
    """Overhead definition for the node/v1 Kubernetes API."""

    def __init__(
        self,
        pod_fixed: dict = None,
    ):
        """Create Overhead instance."""
        super(Overhead, self).__init__(api_version="node/v1", kind="Overhead")
        self._properties = {
            "podFixed": {} if pod_fixed is None else pod_fixed,
        }
        self._types = {
            "podFixed": (dict, None),
        }

    @property
    def pod_fixed(self) -> dict:
        """The stored ``podFixed`` mapping for this object."""
        return typing.cast(dict, self._properties.get("podFixed"))

    @pod_fixed.setter
    def pod_fixed(self, value: dict):
        """Set the ``podFixed`` mapping for this object."""
        self._properties["podFixed"] = value

    def __enter__(self) -> "Overhead":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
class RuntimeClass(_kuber_definitions.Resource):
    """RuntimeClass resource for the node/v1 Kubernetes API."""

    def __init__(
        self,
        handler: str = None,
        metadata: "ObjectMeta" = None,
        overhead: "Overhead" = None,
        scheduling: "Scheduling" = None,
    ):
        """Create RuntimeClass instance."""
        super(RuntimeClass, self).__init__(api_version="node/v1", kind="RuntimeClass")
        self._properties = {
            "handler": handler if handler is not None else "",
            "metadata": metadata if metadata is not None else ObjectMeta(),
            "overhead": overhead if overhead is not None else Overhead(),
            "scheduling": scheduling if scheduling is not None else Scheduling(),
        }
        self._types = {
            "apiVersion": (str, None),
            "handler": (str, None),
            "kind": (str, None),
            "metadata": (ObjectMeta, None),
            "overhead": (Overhead, None),
            "scheduling": (Scheduling, None),
        }

    @property
    def handler(self) -> str:
        """The stored ``handler`` value."""
        return typing.cast(
            str,
            self._properties.get("handler"),
        )

    @handler.setter
    def handler(self, value: str):
        """Set the ``handler`` value."""
        self._properties["handler"] = value

    @property
    def metadata(self) -> "ObjectMeta":
        """The stored ``metadata`` object."""
        return typing.cast(
            "ObjectMeta",
            self._properties.get("metadata"),
        )

    @metadata.setter
    def metadata(self, value: typing.Union["ObjectMeta", dict]):
        """Set ``metadata``; plain dicts are deserialized into ObjectMeta."""
        if isinstance(value, dict):
            value = typing.cast(
                ObjectMeta,
                ObjectMeta().from_dict(value),
            )
        self._properties["metadata"] = value

    @property
    def overhead(self) -> "Overhead":
        """The stored ``overhead`` object."""
        return typing.cast(
            "Overhead",
            self._properties.get("overhead"),
        )

    @overhead.setter
    def overhead(self, value: typing.Union["Overhead", dict]):
        """Set ``overhead``; plain dicts are deserialized into Overhead."""
        if isinstance(value, dict):
            value = typing.cast(
                Overhead,
                Overhead().from_dict(value),
            )
        self._properties["overhead"] = value

    @property
    def scheduling(self) -> "Scheduling":
        """The stored ``scheduling`` object."""
        return typing.cast(
            "Scheduling",
            self._properties.get("scheduling"),
        )

    @scheduling.setter
    def scheduling(self, value: typing.Union["Scheduling", dict]):
        """Set ``scheduling``; plain dicts are deserialized into Scheduling."""
        if isinstance(value, dict):
            value = typing.cast(
                Scheduling,
                Scheduling().from_dict(value),
            )
        self._properties["scheduling"] = value

    def create_resource(self, namespace: "str" = None):
        """Create this resource in the cluster via the kube API."""
        names = ["create_namespaced_runtime_class", "create_runtime_class"]
        _kube_api.execute(
            action="create",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"body": self.to_dict()},
        )

    def replace_resource(self, namespace: "str" = None):
        """Replace this resource in the cluster via the kube API."""
        names = ["replace_namespaced_runtime_class", "replace_runtime_class"]
        _kube_api.execute(
            action="replace",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"body": self.to_dict(), "name": self.metadata.name},
        )

    def patch_resource(self, namespace: "str" = None):
        """Patch this resource in the cluster via the kube API."""
        names = ["patch_namespaced_runtime_class", "patch_runtime_class"]
        _kube_api.execute(
            action="patch",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"body": self.to_dict(), "name": self.metadata.name},
        )

    def get_resource_status(self, namespace: "str" = None):
        """This object does not have a status; intentionally a no-op."""
        pass

    def read_resource(self, namespace: str = None):
        """Read this resource from the cluster and return the API response."""
        names = [
            "read_namespaced_runtime_class",
            "read_runtime_class",
        ]
        return _kube_api.execute(
            action="read",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"name": self.metadata.name},
        )

    def delete_resource(
        self,
        namespace: str = None,
        propagation_policy: str = "Foreground",
        grace_period_seconds: int = 10,
    ):
        """Delete this resource from the cluster via the kube API."""
        names = [
            "delete_namespaced_runtime_class",
            "delete_runtime_class",
        ]
        body = client.V1DeleteOptions(
            propagation_policy=propagation_policy,
            grace_period_seconds=grace_period_seconds,
        )
        _kube_api.execute(
            action="delete",
            resource=self,
            names=names,
            namespace=namespace,
            api_client=None,
            api_args={"name": self.metadata.name, "body": body},
        )

    @staticmethod
    def get_resource_api(
        api_client: client.ApiClient = None, **kwargs
    ) -> "client.NodeV1Api":
        """Return the kubernetes API client associated with this object."""
        if api_client:
            # Bug fix: the keyword was misspelled "apl_client", which
            # `client.NodeV1Api` rejects; the parameter is "api_client".
            kwargs["api_client"] = api_client
        return client.NodeV1Api(**kwargs)

    def __enter__(self) -> "RuntimeClass":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
class RuntimeClassList(_kuber_definitions.Collection):
    """Collection of RuntimeClass objects for the node/v1 Kubernetes API."""

    def __init__(
        self,
        items: typing.List["RuntimeClass"] = None,
        metadata: "ListMeta" = None,
    ):
        """Create RuntimeClassList instance."""
        super(RuntimeClassList, self).__init__(
            api_version="node/v1", kind="RuntimeClassList"
        )
        self._properties = {
            "items": items if items is not None else [],
            "metadata": metadata if metadata is not None else ListMeta(),
        }
        self._types = {
            "apiVersion": (str, None),
            "items": (list, RuntimeClass),
            "kind": (str, None),
            "metadata": (ListMeta, None),
        }

    @property
    def items(self) -> typing.List["RuntimeClass"]:
        """The list of RuntimeClass objects held by this collection."""
        return typing.cast(
            typing.List["RuntimeClass"],
            self._properties.get("items"),
        )

    @items.setter
    def items(
        self, value: typing.Union[typing.List["RuntimeClass"], typing.List[dict]]
    ):
        """Set the items; plain dicts are deserialized into RuntimeClass."""
        cleaned: typing.List[RuntimeClass] = []
        for item in value:
            if isinstance(item, dict):
                item = typing.cast(
                    RuntimeClass,
                    RuntimeClass().from_dict(item),
                )
            cleaned.append(typing.cast(RuntimeClass, item))
        self._properties["items"] = cleaned

    @property
    def metadata(self) -> "ListMeta":
        """The stored list metadata object."""
        return typing.cast(
            "ListMeta",
            self._properties.get("metadata"),
        )

    @metadata.setter
    def metadata(self, value: typing.Union["ListMeta", dict]):
        """Set the list metadata; plain dicts are deserialized into ListMeta."""
        if isinstance(value, dict):
            value = typing.cast(
                ListMeta,
                ListMeta().from_dict(value),
            )
        self._properties["metadata"] = value

    @staticmethod
    def get_resource_api(
        api_client: client.ApiClient = None, **kwargs
    ) -> "client.NodeV1Api":
        """Return the kubernetes API client associated with this object."""
        if api_client:
            # Bug fix: the keyword was misspelled "apl_client", which
            # `client.NodeV1Api` rejects; the parameter is "api_client".
            kwargs["api_client"] = api_client
        return client.NodeV1Api(**kwargs)

    def __enter__(self) -> "RuntimeClassList":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
class Scheduling(_kuber_definitions.Definition):
    """Scheduling constraints for nodes supporting a RuntimeClass."""

    def __init__(
        self,
        node_selector: dict = None,
        tolerations: typing.List["Toleration"] = None,
    ):
        """Create Scheduling instance."""
        super(Scheduling, self).__init__(api_version="node/v1", kind="Scheduling")
        self._properties = {
            "nodeSelector": {} if node_selector is None else node_selector,
            "tolerations": [] if tolerations is None else tolerations,
        }
        self._types = {
            "nodeSelector": (dict, None),
            "tolerations": (list, Toleration),
        }

    @property
    def node_selector(self) -> dict:
        """The stored ``nodeSelector`` mapping."""
        return typing.cast(dict, self._properties.get("nodeSelector"))

    @node_selector.setter
    def node_selector(self, value: dict):
        """Set the ``nodeSelector`` mapping."""
        self._properties["nodeSelector"] = value

    @property
    def tolerations(self) -> typing.List["Toleration"]:
        """The stored list of Toleration objects."""
        return typing.cast(
            typing.List["Toleration"], self._properties.get("tolerations")
        )

    @tolerations.setter
    def tolerations(
        self, value: typing.Union[typing.List["Toleration"], typing.List[dict]]
    ):
        """Set the tolerations; plain dicts are deserialized into Toleration."""

        def _coerce(entry):
            if isinstance(entry, dict):
                return typing.cast(Toleration, Toleration().from_dict(entry))
            return typing.cast(Toleration, entry)

        self._properties["tolerations"] = [_coerce(entry) for entry in value]

    def __enter__(self) -> "Scheduling":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return False
| true | true |
f7fe0a23919edf02171642d4dde95287f46708c0 | 91,404 | py | Python | src/transformers/trainer.py | silvershine157/transformers | e6ce636e02ec1cd3f9893af6ab1eec4f113025db | [
"Apache-2.0"
] | 1 | 2021-03-08T20:38:33.000Z | 2021-03-08T20:38:33.000Z | src/transformers/trainer.py | Iwontbecreative/transformers | fd01104435914dd65c34026dcec8be008c40ee60 | [
"Apache-2.0"
] | null | null | null | src/transformers/trainer.py | Iwontbecreative/transformers | fd01104435914dd65c34026dcec8be008c40ee60 | [
"Apache-2.0"
] | 1 | 2021-11-06T19:12:30.000Z | 2021-11-06T19:12:30.000Z | # coding=utf-8
# Copyright 2020-present the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The Trainer class, to easily train a 🤗 Transformers from scratch or finetune it on a new task.
"""
import collections
import gc
import inspect
import math
import os
import re
import shutil
import time
import warnings
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
# Integrations must be imported before ML frameworks:
from .integrations import ( # isort: split
default_hp_search_backend,
get_reporting_integration_callbacks,
hp_params,
is_fairscale_available,
is_optuna_available,
is_ray_tune_available,
run_hp_search_optuna,
run_hp_search_ray,
init_deepspeed,
)
import numpy as np
import torch
from packaging import version
from torch import nn
from torch.utils.data.dataloader import DataLoader
from torch.utils.data.dataset import Dataset
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import RandomSampler, SequentialSampler
from .data.data_collator import DataCollator, DataCollatorWithPadding, default_data_collator
from .file_utils import (
WEIGHTS_NAME,
is_apex_available,
is_datasets_available,
is_in_notebook,
is_sagemaker_distributed_available,
is_torch_tpu_available,
)
from .modeling_utils import PreTrainedModel, unwrap_model
from .optimization import Adafactor, AdamW, get_scheduler
from .tokenization_utils_base import PreTrainedTokenizerBase
from .trainer_callback import (
CallbackHandler,
DefaultFlowCallback,
PrinterCallback,
ProgressCallback,
TrainerCallback,
TrainerControl,
TrainerState,
)
from .trainer_pt_utils import (
DistributedLengthGroupedSampler,
DistributedTensorGatherer,
LabelSmoother,
LengthGroupedSampler,
SequentialDistributedSampler,
distributed_broadcast_scalars,
distributed_concat,
nested_concat,
nested_detach,
nested_numpify,
nested_xla_mesh_reduce,
reissue_pt_warnings,
)
from .trainer_utils import (
PREFIX_CHECKPOINT_DIR,
BestRun,
EvalPrediction,
HPSearchBackend,
PredictionOutput,
ShardedDDPOption,
TrainerMemoryTracker,
TrainOutput,
default_compute_objective,
default_hp_space,
get_last_checkpoint,
set_seed,
speed_metrics,
)
from .training_args import ParallelMode, TrainingArguments
from .utils import logging
from .utils.modeling_auto_mapping import MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES
_is_native_amp_available = False
DEFAULT_CALLBACKS = [DefaultFlowCallback]
DEFAULT_PROGRESS_CALLBACK = ProgressCallback
if is_in_notebook():
from .utils.notebook import NotebookProgressCallback
DEFAULT_PROGRESS_CALLBACK = NotebookProgressCallback
if is_apex_available():
from apex import amp
if version.parse(torch.__version__) >= version.parse("1.6"):
_is_native_amp_available = True
from torch.cuda.amp import autocast
if is_datasets_available():
import datasets
if is_torch_tpu_available():
import torch_xla.core.xla_model as xm
import torch_xla.debug.metrics as met
import torch_xla.distributed.parallel_loader as pl
if is_fairscale_available():
import fairscale
from fairscale.nn.data_parallel import ShardedDataParallel as ShardedDDP
from fairscale.optim import OSS
from fairscale.optim.grad_scaler import ShardedGradScaler
if version.parse(fairscale.__version__) >= version.parse("0.3"):
from fairscale.nn.data_parallel import FullyShardedDataParallel as FullyShardedDDP
else:
FullyShardedDDP = None
if is_sagemaker_distributed_available():
import smdistributed.dataparallel.torch.distributed as dist
from smdistributed.dataparallel.torch.parallel.distributed import DistributedDataParallel as DDP
else:
import torch.distributed as dist
if TYPE_CHECKING:
import optuna
logger = logging.get_logger(__name__)
class Trainer:
"""
Trainer is a simple but feature-complete training and eval loop for PyTorch, optimized for 🤗 Transformers.
Args:
model (:class:`~transformers.PreTrainedModel` or :obj:`torch.nn.Module`, `optional`):
The model to train, evaluate or use for predictions. If not provided, a ``model_init`` must be passed.
.. note::
:class:`~transformers.Trainer` is optimized to work with the :class:`~transformers.PreTrainedModel`
provided by the library. You can still use your own models defined as :obj:`torch.nn.Module` as long as
they work the same way as the 🤗 Transformers models.
args (:class:`~transformers.TrainingArguments`, `optional`):
The arguments to tweak for training. Will default to a basic instance of
:class:`~transformers.TrainingArguments` with the ``output_dir`` set to a directory named `tmp_trainer` in
the current directory if not provided.
data_collator (:obj:`DataCollator`, `optional`):
The function to use to form a batch from a list of elements of :obj:`train_dataset` or :obj:`eval_dataset`.
Will default to :func:`~transformers.default_data_collator` if no ``tokenizer`` is provided, an instance of
:func:`~transformers.DataCollatorWithPadding` otherwise.
train_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
The dataset to use for training. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed.
eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
The dataset to use for evaluation. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed.
tokenizer (:class:`PreTrainedTokenizerBase`, `optional`):
The tokenizer used to preprocess the data. If provided, will be used to automatically pad the inputs the
maximum length when batching inputs, and it will be saved along the model to make it easier to rerun an
interrupted training or reuse the fine-tuned model.
model_init (:obj:`Callable[[], PreTrainedModel]`, `optional`):
A function that instantiates the model to be used. If provided, each call to
:meth:`~transformers.Trainer.train` will start from a new instance of the model as given by this function.
The function may have zero argument, or a single one containing the optuna/Ray Tune trial object, to be
able to choose different architectures according to hyper parameters (such as layer count, sizes of inner
layers, dropout probabilities etc).
compute_metrics (:obj:`Callable[[EvalPrediction], Dict]`, `optional`):
The function that will be used to compute metrics at evaluation. Must take a
:class:`~transformers.EvalPrediction` and return a dictionary string to metric values.
callbacks (List of :obj:`~transformers.TrainerCallback`, `optional`):
A list of callbacks to customize the training loop. Will add those to the list of default callbacks
detailed in :doc:`here <callback>`.
If you want to remove one of the default callbacks used, use the :meth:`Trainer.remove_callback` method.
optimizers (:obj:`Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR`, `optional`): A tuple
containing the optimizer and the scheduler to use. Will default to an instance of
:class:`~transformers.AdamW` on your model and a scheduler given by
:func:`~transformers.get_linear_schedule_with_warmup` controlled by :obj:`args`.
Important attributes:
- **model** -- Always points to the core model. If using a transformers model, it will be a
:class:`~transformers.PreTrainedModel` subclass.
- **model_wrapped** -- Always points to the most external model in case one or more other modules wrap the
original model. This is the model that should be used for the forward pass. For example, under ``DeepSpeed``,
the inner model is wrapped in ``DeepSpeed`` and then again in ``torch.nn.DistributedDataParallel``. If the
inner model hasn't been wrapped, then ``self.model_wrapped`` is the same as ``self.model``.
- **is_model_parallel** -- Whether or not a model has been switched to a model parallel mode (different from
data parallelism, this means some of the model layers are split on different GPUs).
- **place_model_on_device** -- Whether or not to automatically place the model on the device - it will be set
to :obj:`False` if model parallel or deepspeed is used, or if the default
``TrainingArguments.place_model_on_device`` is overridden to return :obj:`False` .
- **is_in_train** -- Whether or not a model is currently running ``train`` (e.g. when ``evaluate`` is called
while in ``train``)
"""
from .trainer_pt_utils import _get_learning_rate, log_metrics, metrics_format, save_metrics, save_state
    def __init__(
        self,
        model: Union[PreTrainedModel, torch.nn.Module] = None,
        args: TrainingArguments = None,
        data_collator: Optional[DataCollator] = None,
        train_dataset: Optional[Dataset] = None,
        eval_dataset: Optional[Dataset] = None,
        tokenizer: Optional["PreTrainedTokenizerBase"] = None,
        model_init: Callable[[], PreTrainedModel] = None,
        compute_metrics: Optional[Callable[[EvalPrediction], Dict]] = None,
        callbacks: Optional[List[TrainerCallback]] = None,
        optimizers: Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR] = (None, None),
    ):
        """
        Set up the trainer. Argument semantics are documented on the class
        docstring. NOTE(review): the statement order below is deliberate
        (seed before model instantiation, memory tracker first) — do not
        reorder.
        """
        if args is None:
            output_dir = "tmp_trainer"
            logger.info(f"No `TrainingArguments` passed, using `output_dir={output_dir}`.")
            args = TrainingArguments(output_dir=output_dir)
        self.args = args
        # Seed must be set before instantiating the model when using model_init.
        set_seed(self.args.seed)
        self.hp_name = None
        self.deepspeed = None
        self.is_in_train = False
        # memory metrics - must set up as early as possible
        self._memory_tracker = TrainerMemoryTracker(self.args.skip_memory_metrics)
        self._memory_tracker.start()
        # force device and distributed setup init explicitly
        args._setup_devices
        if model is None:
            if model_init is not None:
                self.model_init = model_init
                model = self.call_model_init()
            else:
                raise RuntimeError("`Trainer` requires either a `model` or `model_init` argument")
        else:
            if model_init is not None:
                # Both given: `model_init` wins on `train()`; warn the caller.
                warnings.warn(
                    "`Trainer` requires either a `model` or `model_init` argument, but not both. "
                    "`model_init` will overwrite your model when calling the `train` method. This will become a fatal error in the next release.",
                    FutureWarning,
                )
            self.model_init = model_init
        if hasattr(model, "is_parallelizable") and model.is_parallelizable and model.model_parallel:
            self.is_model_parallel = True
        else:
            self.is_model_parallel = False
        # Setup Sharded DDP training
        self.sharded_ddp = None
        if len(args.sharded_ddp) > 0:
            if args.deepspeed:
                raise ValueError(
                    "Using --sharded_ddp xxx together with --deepspeed is not possible, deactivate one of those flags."
                )
            if args.local_rank == -1:
                raise ValueError("Using sharded DDP only works in distributed training.")
            elif not is_fairscale_available():
                raise ImportError("Sharded DDP training requires fairscale: `pip install fairscale`.")
            elif ShardedDDPOption.SIMPLE not in args.sharded_ddp and FullyShardedDDP is None:
                raise ImportError(
                    "Sharded DDP in a mode other than simple training requires fairscale version >= 0.3, found "
                    f"{fairscale.__version__}. Upgrade your fairscale library: `pip install --upgrade fairscale`."
                )
            elif ShardedDDPOption.SIMPLE in args.sharded_ddp:
                self.sharded_ddp = ShardedDDPOption.SIMPLE
            elif ShardedDDPOption.ZERO_DP_2 in args.sharded_ddp:
                self.sharded_ddp = ShardedDDPOption.ZERO_DP_2
            elif ShardedDDPOption.ZERO_DP_3 in args.sharded_ddp:
                self.sharded_ddp = ShardedDDPOption.ZERO_DP_3
        # one place to sort out whether to place the model on device or not
        self.place_model_on_device = args.place_model_on_device
        if (
            self.is_model_parallel
            or (args.deepspeed and args.do_train)
            or (args.fp16_full_eval and not args.do_train)
            or (self.sharded_ddp in [ShardedDDPOption.ZERO_DP_2, ShardedDDPOption.ZERO_DP_3])
        ):
            self.place_model_on_device = False
        default_collator = default_data_collator if tokenizer is None else DataCollatorWithPadding(tokenizer)
        self.data_collator = data_collator if data_collator is not None else default_collator
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.tokenizer = tokenizer
        # postpone switching model to cuda when:
        # 1. MP - since we are trying to fit a much bigger than 1 gpu model
        # 2. fp16-enabled DeepSpeed loads the model in half the size and it doesn't need .to() anyway,
        #    and we only use deepspeed for training at the moment
        if self.place_model_on_device:
            model = model.to(args.device)
        # Force n_gpu to 1 to avoid DataParallel as MP will manage the GPUs
        if self.is_model_parallel:
            self.args._n_gpu = 1
        # later use `self.model is self.model_wrapped` to check if it's wrapped or not
        self.model_wrapped = model
        self.model = model
        self.compute_metrics = compute_metrics
        self.optimizer, self.lr_scheduler = optimizers
        if model_init is not None and (self.optimizer is not None or self.lr_scheduler is not None):
            raise RuntimeError(
                "Passing a `model_init` is incompatible with providing the `optimizers` argument."
                "You should subclass `Trainer` and override the `create_optimizer_and_scheduler` method."
            )
        default_callbacks = DEFAULT_CALLBACKS + get_reporting_integration_callbacks(self.args.report_to)
        callbacks = default_callbacks if callbacks is None else default_callbacks + callbacks
        self.callback_handler = CallbackHandler(
            callbacks, self.model, self.tokenizer, self.optimizer, self.lr_scheduler
        )
        self.add_callback(PrinterCallback if self.args.disable_tqdm else DEFAULT_PROGRESS_CALLBACK)
        # Will be set to True by `self._setup_loggers()` on first call to `self.log()`.
        self._loggers_initialized = False
        # Create output directory if needed
        if self.is_world_process_zero():
            os.makedirs(self.args.output_dir, exist_ok=True)
        if not callable(self.data_collator) and callable(getattr(self.data_collator, "collate_batch", None)):
            raise ValueError("The `data_collator` should be a simple callable (function, class with `__call__`).")
        if args.max_steps > 0:
            logger.info("max_steps is given, it will override any value given in num_train_epochs")
        # Enforce rules on using datasets with no __len__
        if train_dataset is not None and not isinstance(train_dataset, collections.abc.Sized) and args.max_steps <= 0:
            raise ValueError("train_dataset does not implement __len__, max_steps has to be specified")
        if eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
            raise ValueError("eval_dataset must implement __len__")
        self._signature_columns = None
        if is_datasets_available():
            if isinstance(train_dataset, datasets.Dataset):
                self._remove_unused_columns(self.train_dataset, description="training")
            if isinstance(eval_dataset, datasets.Dataset):
                self._remove_unused_columns(self.eval_dataset, description="evaluation")
        # Mixed precision setup
        self.use_apex = False
        self.use_amp = False
        self.fp16_backend = None
        if args.fp16:
            if args.fp16_backend == "auto":
                self.fp16_backend = "amp" if _is_native_amp_available else "apex"
            else:
                self.fp16_backend = args.fp16_backend
            logger.info(f"Using {self.fp16_backend} fp16 backend")
        if args.fp16 and not args.deepspeed:  # deepspeed manages its own fp16
            if self.fp16_backend == "amp":
                self.use_amp = True
                self.scaler = ShardedGradScaler() if self.sharded_ddp is not None else torch.cuda.amp.GradScaler()
            else:
                if not is_apex_available():
                    raise ImportError(
                        "Using FP16 with APEX but APEX is not installed, please refer to https://www.github.com/nvidia/apex."
                    )
                self.use_apex = True
        # Label smoothing
        if self.args.label_smoothing_factor != 0:
            self.label_smoother = LabelSmoother(epsilon=self.args.label_smoothing_factor)
        else:
            self.label_smoother = None
        self.state = TrainerState()
        self.control = TrainerControl()
        # Internal variable for total_flos used to count as tensors (for distributed + TPU), will be sent in the
        # state at each call to self.log.
        self._total_flos = None
        self.hp_search_backend = None
        self.use_tune_checkpoints = False
        default_label_names = (
            ["start_positions", "end_positions"]
            if type(self.model).__name__ in MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES.values()
            else ["labels"]
        )
        self.label_names = default_label_names if self.args.label_names is None else self.args.label_names
        self.control = self.callback_handler.on_init_end(self.args, self.state, self.control)
        # very last
        self._memory_tracker.stop_and_update_metrics()
def add_callback(self, callback):
"""
Add a callback to the current list of :class:`~transformer.TrainerCallback`.
Args:
callback (:obj:`type` or :class:`~transformer.TrainerCallback`):
A :class:`~transformer.TrainerCallback` class or an instance of a :class:`~transformer.TrainerCallback`.
In the first case, will instantiate a member of that class.
"""
self.callback_handler.add_callback(callback)
def pop_callback(self, callback):
"""
Remove a callback from the current list of :class:`~transformer.TrainerCallback` and returns it.
If the callback is not found, returns :obj:`None` (and no error is raised).
Args:
callback (:obj:`type` or :class:`~transformer.TrainerCallback`):
A :class:`~transformer.TrainerCallback` class or an instance of a :class:`~transformer.TrainerCallback`.
In the first case, will pop the first member of that class found in the list of callbacks.
Returns:
:class:`~transformer.TrainerCallback`: The callback removed, if found.
"""
return self.callback_handler.pop_callback(callback)
def remove_callback(self, callback):
"""
Remove a callback from the current list of :class:`~transformer.TrainerCallback`.
Args:
callback (:obj:`type` or :class:`~transformer.TrainerCallback`):
A :class:`~transformer.TrainerCallback` class or an instance of a :class:`~transformer.TrainerCallback`.
In the first case, will remove the first member of that class found in the list of callbacks.
"""
self.callback_handler.remove_callback(callback)
def _remove_unused_columns(self, dataset: "datasets.Dataset", description: Optional[str] = None):
if not self.args.remove_unused_columns:
return
if self._signature_columns is None:
# Inspect model forward signature to keep only the arguments it accepts.
signature = inspect.signature(self.model.forward)
self._signature_columns = list(signature.parameters.keys())
# Labels may be named label or label_ids, the default data collator handles that.
self._signature_columns += ["label", "label_ids"]
columns = [k for k in self._signature_columns if k in dataset.column_names]
ignored_columns = list(set(dataset.column_names) - set(self._signature_columns))
if len(ignored_columns) > 0:
dset_description = "" if description is None else f"in the {description} set "
logger.info(
f"The following columns {dset_description} don't have a corresponding argument in "
f"`{self.model.__class__.__name__}.forward` and have been ignored: {', '.join(ignored_columns)}."
)
dataset.set_format(type=dataset.format["type"], columns=columns, format_kwargs=dataset.format["format_kwargs"])
def _get_train_sampler(self) -> Optional[torch.utils.data.sampler.Sampler]:
if isinstance(self.train_dataset, torch.utils.data.IterableDataset) or not isinstance(
self.train_dataset, collections.abc.Sized
):
return None
# Gather the number of processes and this process index.
if self.args.parallel_mode == ParallelMode.TPU:
num_processes = xm.xrt_world_size()
process_index = xm.get_ordinal()
elif (
self.args.parallel_mode == ParallelMode.DISTRIBUTED
or self.args.parallel_mode == ParallelMode.SAGEMAKER_DISTRIBUTED
):
num_processes = dist.get_world_size()
process_index = dist.get_rank()
else:
num_processes = 1
process_index = 0
# Build the sampler.
if self.args.group_by_length:
model_input_name = self.tokenizer.model_input_names[0] if self.tokenizer is not None else None
if num_processes <= 1:
return LengthGroupedSampler(
self.train_dataset, self.args.train_batch_size, model_input_name=model_input_name
)
else:
return DistributedLengthGroupedSampler(
self.train_dataset,
self.args.train_batch_size,
num_replicas=num_processes,
rank=process_index,
model_input_name=model_input_name,
)
else:
if num_processes <= 1:
return RandomSampler(self.train_dataset)
else:
return DistributedSampler(self.train_dataset, num_replicas=num_processes, rank=process_index)
def get_train_dataloader(self) -> DataLoader:
"""
Returns the training :class:`~torch.utils.data.DataLoader`.
Will use no sampler if :obj:`self.train_dataset` does not implement :obj:`__len__`, a random sampler (adapted
to distributed training if necessary) otherwise.
Subclass and override this method if you want to inject some custom behavior.
"""
if self.train_dataset is None:
raise ValueError("Trainer: training requires a train_dataset.")
train_sampler = self._get_train_sampler()
return DataLoader(
self.train_dataset,
batch_size=self.args.train_batch_size,
sampler=train_sampler,
collate_fn=self.data_collator,
drop_last=self.args.dataloader_drop_last,
num_workers=self.args.dataloader_num_workers,
pin_memory=self.args.dataloader_pin_memory,
)
def _get_eval_sampler(self, eval_dataset: Dataset) -> Optional[torch.utils.data.sampler.Sampler]:
if is_torch_tpu_available():
return SequentialDistributedSampler(eval_dataset, num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal())
elif self.args.local_rank != -1:
return SequentialDistributedSampler(eval_dataset)
else:
return SequentialSampler(eval_dataset)
def get_eval_dataloader(self, eval_dataset: Optional[Dataset] = None) -> DataLoader:
"""
Returns the evaluation :class:`~torch.utils.data.DataLoader`.
Subclass and override this method if you want to inject some custom behavior.
Args:
eval_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
If provided, will override :obj:`self.eval_dataset`. If it is an :obj:`datasets.Dataset`, columns not
accepted by the ``model.forward()`` method are automatically removed. It must implement :obj:`__len__`.
"""
if eval_dataset is None and self.eval_dataset is None:
raise ValueError("Trainer: evaluation requires an eval_dataset.")
elif eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
raise ValueError("eval_dataset must implement __len__")
elif is_datasets_available() and isinstance(eval_dataset, datasets.Dataset):
self._remove_unused_columns(eval_dataset, description="evaluation")
eval_dataset = eval_dataset if eval_dataset is not None else self.eval_dataset
eval_sampler = self._get_eval_sampler(eval_dataset)
return DataLoader(
eval_dataset,
sampler=eval_sampler,
batch_size=self.args.eval_batch_size,
collate_fn=self.data_collator,
drop_last=self.args.dataloader_drop_last,
num_workers=self.args.dataloader_num_workers,
pin_memory=self.args.dataloader_pin_memory,
)
def get_test_dataloader(self, test_dataset: Dataset) -> DataLoader:
"""
Returns the test :class:`~torch.utils.data.DataLoader`.
Subclass and override this method if you want to inject some custom behavior.
Args:
test_dataset (:obj:`torch.utils.data.dataset.Dataset`, `optional`):
The test dataset to use. If it is an :obj:`datasets.Dataset`, columns not accepted by the
``model.forward()`` method are automatically removed. It must implement :obj:`__len__`.
"""
if not isinstance(test_dataset, collections.abc.Sized):
raise ValueError("test_dataset must implement __len__")
elif is_datasets_available() and isinstance(test_dataset, datasets.Dataset):
self._remove_unused_columns(test_dataset, description="test")
test_sampler = self._get_eval_sampler(test_dataset)
# We use the same batch_size as for eval.
return DataLoader(
test_dataset,
sampler=test_sampler,
batch_size=self.args.eval_batch_size,
collate_fn=self.data_collator,
drop_last=self.args.dataloader_drop_last,
pin_memory=self.args.dataloader_pin_memory,
)
def create_optimizer_and_scheduler(self, num_training_steps: int):
"""
Setup the optimizer and the learning rate scheduler.
We provide a reasonable default that works well. If you want to use something else, you can pass a tuple in the
Trainer's init through :obj:`optimizers`, or subclass and override this method in a subclass.
"""
if self.optimizer is None:
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in self.model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": self.args.weight_decay,
},
{
"params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
]
optimizer_cls = Adafactor if self.args.adafactor else AdamW
if self.args.adafactor:
optimizer_cls = Adafactor
optimizer_kwargs = {"scale_parameter": False, "relative_step": False}
else:
optimizer_cls = AdamW
optimizer_kwargs = {
"betas": (self.args.adam_beta1, self.args.adam_beta2),
"eps": self.args.adam_epsilon,
}
optimizer_kwargs["lr"] = self.args.learning_rate
if self.sharded_ddp == ShardedDDPOption.SIMPLE:
self.optimizer = OSS(
params=optimizer_grouped_parameters,
optim=optimizer_cls,
**optimizer_kwargs,
)
else:
self.optimizer = optimizer_cls(optimizer_grouped_parameters, **optimizer_kwargs)
if self.lr_scheduler is None:
warmup_steps = (
self.args.warmup_steps
if self.args.warmup_steps > 0
else math.ceil(num_training_steps * self.args.warmup_ratio)
)
self.lr_scheduler = get_scheduler(
self.args.lr_scheduler_type,
self.optimizer,
num_warmup_steps=warmup_steps,
num_training_steps=num_training_steps,
)
def num_examples(self, dataloader: DataLoader) -> int:
"""
Helper to get number of samples in a :class:`~torch.utils.data.DataLoader` by accessing its dataset.
Will raise an exception if the underlying dataset dese not implement method :obj:`__len__`
"""
return len(dataloader.dataset)
def _hp_search_setup(self, trial: Union["optuna.Trial", Dict[str, Any]]):
""" HP search setup code """
self._trial = trial
if self.hp_search_backend is None or trial is None:
return
params = self.hp_space(trial) if self.hp_search_backend == HPSearchBackend.OPTUNA else trial
for key, value in params.items():
if not hasattr(self.args, key):
raise AttributeError(
f"Trying to set {key} in the hyperparameter search but there is no corresponding field in `TrainingArguments`."
)
old_attr = getattr(self.args, key, None)
# Casting value to the proper type
if old_attr is not None:
value = type(old_attr)(value)
setattr(self.args, key, value)
if self.hp_search_backend == HPSearchBackend.OPTUNA:
logger.info("Trial:", trial.params)
def _report_to_hp_search(
self, trial: Union["optuna.Trial", Dict[str, Any]], epoch: int, metrics: Dict[str, float]
):
if self.hp_search_backend is None or trial is None:
return
self.objective = self.compute_objective(metrics.copy())
if self.hp_search_backend == HPSearchBackend.OPTUNA:
import optuna
trial.report(self.objective, epoch)
if trial.should_prune():
raise optuna.TrialPruned()
elif self.hp_search_backend == HPSearchBackend.RAY:
from ray import tune
if self.control.should_save:
self._tune_save_checkpoint()
tune.report(objective=self.objective, **metrics)
def _tune_save_checkpoint(self):
from ray import tune
if not self.use_tune_checkpoints:
return
with tune.checkpoint_dir(step=self.state.global_step) as checkpoint_dir:
output_dir = os.path.join(checkpoint_dir, f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}")
self.save_model(output_dir)
if self.is_world_process_zero():
self.state.save_to_json(os.path.join(output_dir, "trainer_state.json"))
torch.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
def call_model_init(self, trial=None):
model_init_argcount = len(inspect.signature(self.model_init).parameters)
if model_init_argcount == 0:
model = self.model_init()
elif model_init_argcount == 1:
model = self.model_init(trial)
else:
raise RuntimeError("model_init should have 0 or 1 argument.")
if model is None:
raise RuntimeError("model_init should not return None.")
return model
    def _wrap_model(self, model, training=True):
        """
        Wrap ``model`` for the current hardware / parallelism configuration.
        The order of the wrappers matters: apex AMP initialization must happen before
        ``DataParallel``, which must happen before any distributed wrapper.
        Args:
            model: the model to wrap (usually ``self.model_wrapped``).
            training (:obj:`bool`, `optional`, defaults to :obj:`True`): when ``False`` (evaluation),
                distributed wrappers are skipped since everything runs under ``no_grad`` anyway.
        Returns:
            The wrapped model (or ``self.deepspeed`` when DeepSpeed already handled wrapping).
        """
        # already initialized its own DDP and AMP
        if self.deepspeed:
            return self.deepspeed
        # Mixed precision training with apex (torch < 1.6)
        if self.use_apex and training:
            model, self.optimizer = amp.initialize(model, self.optimizer, opt_level=self.args.fp16_opt_level)
        # Multi-gpu training (should be after apex fp16 initialization)
        if self.args.n_gpu > 1:
            model = torch.nn.DataParallel(model)
        # Note: in torch.distributed mode, there's no point in wrapping the model
        # inside a DistributedDataParallel as we'll be under `no_grad` anyways.
        if not training:
            return model
        # Distributed training (should be after apex fp16 initialization)
        if self.sharded_ddp is not None:
            # Sharded DDP!
            if self.sharded_ddp == ShardedDDPOption.SIMPLE:
                model = ShardedDDP(model, self.optimizer)
            else:
                mixed_precision = self.args.fp16
                cpu_offload = ShardedDDPOption.OFFLOAD in self.args.sharded_ddp
                zero_3 = self.sharded_ddp == ShardedDDPOption.ZERO_DP_3
                # XXX: Breaking the self.model convention but I see no way around it for now.
                self.model = model = FullyShardedDDP(
                    model, mixed_precision=mixed_precision, reshard_after_forward=zero_3, cpu_offload=cpu_offload
                ).to(self.args.device)
        elif is_sagemaker_distributed_available():
            model = DDP(model, device_ids=[dist.get_local_rank()], broadcast_buffers=False)
        elif self.args.local_rank != -1:
            if self.args.ddp_find_unused_parameters is not None:
                find_unused_parameters = self.args.ddp_find_unused_parameters
            elif isinstance(model, PreTrainedModel):
                # find_unused_parameters breaks checkpointing as per
                # https://github.com/huggingface/transformers/pull/4659#issuecomment-643356021
                find_unused_parameters = not getattr(model.config, "gradient_checkpointing", False)
            else:
                find_unused_parameters = True
            model = torch.nn.parallel.DistributedDataParallel(
                model,
                device_ids=[self.args.local_rank],
                output_device=self.args.local_rank,
                find_unused_parameters=find_unused_parameters,
            )
        return model
    def train(
        self,
        resume_from_checkpoint: Optional[Union[str, bool]] = None,
        trial: Union["optuna.Trial", Dict[str, Any]] = None,
        **kwargs,
    ):
        """
        Main training entry point.
        Args:
            resume_from_checkpoint (:obj:`str` or :obj:`bool`, `optional`):
                If a :obj:`str`, local path to a saved checkpoint as saved by a previous instance of
                :class:`~transformers.Trainer`. If a :obj:`bool` and equals `True`, load the last checkpoint in
                `args.output_dir` as saved by a previous instance of :class:`~transformers.Trainer`. If present,
                training will resume from the model/optimizer/scheduler states loaded here.
            trial (:obj:`optuna.Trial` or :obj:`Dict[str, Any]`, `optional`):
                The trial run or the hyperparameter dictionary for hyperparameter search.
            kwargs:
                Additional keyword arguments used to hide deprecated arguments
        Returns:
            :class:`~transformers.trainer_utils.TrainOutput`: the final global step, the average
            training loss, and the collected speed/flos metrics.
        """
        # memory metrics - must set up as early as possible
        self._memory_tracker.start()
        self.is_in_train = True
        # Backward compatibility: `model_path` was renamed to `resume_from_checkpoint`.
        if "model_path" in kwargs:
            resume_from_checkpoint = kwargs.pop("model_path")
            warnings.warn(
                "`model_path` is deprecated and will be removed in a future version. Use `resume_from_checkpoint` "
                "instead.",
                FutureWarning,
            )
        if len(kwargs) > 0:
            raise TypeError(f"train() received got unexpected keyword arguments: {', '.join(list(kwargs.keys()))}.")
        # This might change the seed so needs to run first.
        self._hp_search_setup(trial)
        # Model re-init
        model_reloaded = False
        if self.model_init is not None:
            # Seed must be set before instantiating the model when using model_init.
            set_seed(self.args.seed)
            self.model = self.call_model_init(trial)
            model_reloaded = True
            # Reinitializes optimizer and scheduler
            self.optimizer, self.lr_scheduler = None, None
        # Load potential model checkpoint
        if isinstance(resume_from_checkpoint, bool) and resume_from_checkpoint:
            resume_from_checkpoint = get_last_checkpoint(self.args.output_dir)
            if resume_from_checkpoint is None:
                raise ValueError(f"No valid checkpoint found in output directory ({self.args.output_dir})")
        if resume_from_checkpoint is not None and os.path.isfile(os.path.join(resume_from_checkpoint, WEIGHTS_NAME)):
            logger.info(f"Loading model from {resume_from_checkpoint}).")
            if isinstance(self.model, PreTrainedModel):
                self.model = self.model.from_pretrained(resume_from_checkpoint)
                model_reloaded = True
            else:
                state_dict = torch.load(os.path.join(resume_from_checkpoint, WEIGHTS_NAME))
                self.model.load_state_dict(state_dict)
        # If model was re-initialized, put it on the right device and update self.model_wrapped
        if model_reloaded:
            if self.place_model_on_device:
                self.model = self.model.to(self.args.device)
            self.model_wrapped = self.model
        # Keeping track whether we can can len() on the dataset or not
        train_dataset_is_sized = isinstance(self.train_dataset, collections.abc.Sized)
        # Data loader and number of training steps
        train_dataloader = self.get_train_dataloader()
        # Setting up training control variables:
        # number of training epochs: num_train_epochs
        # number of training steps per epoch: num_update_steps_per_epoch
        # total number of training steps to execute: max_steps
        if train_dataset_is_sized:
            num_update_steps_per_epoch = len(train_dataloader) // self.args.gradient_accumulation_steps
            num_update_steps_per_epoch = max(num_update_steps_per_epoch, 1)
            if self.args.max_steps > 0:
                max_steps = self.args.max_steps
                num_train_epochs = self.args.max_steps // num_update_steps_per_epoch + int(
                    self.args.max_steps % num_update_steps_per_epoch > 0
                )
            else:
                max_steps = math.ceil(self.args.num_train_epochs * num_update_steps_per_epoch)
                num_train_epochs = math.ceil(self.args.num_train_epochs)
        else:
            # see __init__. max_steps is set when the dataset has no __len__
            max_steps = self.args.max_steps
            num_train_epochs = 1
            num_update_steps_per_epoch = max_steps
        # Non-SIMPLE sharded DDP must wrap the model before the optimizer is created.
        delay_optimizer_creation = self.sharded_ddp is not None and self.sharded_ddp != ShardedDDPOption.SIMPLE
        if self.args.deepspeed:
            model, optimizer, lr_scheduler = init_deepspeed(self, num_training_steps=max_steps)
            self.model = model.module
            self.model_wrapped = model  # will get further wrapped in DDP
            self.deepspeed = model  # DeepSpeedEngine object
            self.optimizer = optimizer
            self.lr_scheduler = lr_scheduler
        elif not delay_optimizer_creation:
            self.create_optimizer_and_scheduler(num_training_steps=max_steps)
        # Fresh trainer state for this run (global_step, epoch, logging bookkeeping).
        self.state = TrainerState()
        self.state.is_hyper_param_search = trial is not None
        # Check if saved optimizer or scheduler states exist
        self._load_optimizer_and_scheduler(resume_from_checkpoint)
        model = self._wrap_model(self.model_wrapped)
        # for the rest of this function `model` is the outside model, whether it was wrapped or not
        if model is not self.model:
            self.model_wrapped = model
        if delay_optimizer_creation:
            self.create_optimizer_and_scheduler(num_training_steps=max_steps)
        # important: at this point:
        # self.model is the Transformers Model
        # self.model_wrapped is DDP(Transformers Model), Deepspeed(Transformers Model), etc.
        # Train!
        if is_torch_tpu_available():
            world_size = xm.xrt_world_size()
        elif self.args.local_rank != -1:
            world_size = dist.get_world_size()
        else:
            world_size = 1
        total_train_batch_size = self.args.train_batch_size * self.args.gradient_accumulation_steps * world_size
        num_examples = (
            self.num_examples(train_dataloader)
            if train_dataset_is_sized
            else total_train_batch_size * self.args.max_steps
        )
        logger.info("***** Running training *****")
        logger.info(f"  Num examples = {num_examples}")
        logger.info(f"  Num Epochs = {num_train_epochs}")
        logger.info(f"  Instantaneous batch size per device = {self.args.per_device_train_batch_size}")
        logger.info(f"  Total train batch size (w. parallel, distributed & accumulation) = {total_train_batch_size}")
        logger.info(f"  Gradient Accumulation steps = {self.args.gradient_accumulation_steps}")
        logger.info(f"  Total optimization steps = {max_steps}")
        self.state.epoch = 0
        start_time = time.time()
        epochs_trained = 0
        steps_trained_in_current_epoch = 0
        # Check if continuing training from a checkpoint
        if resume_from_checkpoint is not None and os.path.isfile(
            os.path.join(resume_from_checkpoint, "trainer_state.json")
        ):
            self.state = TrainerState.load_from_json(os.path.join(resume_from_checkpoint, "trainer_state.json"))
            epochs_trained = self.state.global_step // num_update_steps_per_epoch
            if not self.args.ignore_data_skip:
                steps_trained_in_current_epoch = self.state.global_step % (num_update_steps_per_epoch)
                steps_trained_in_current_epoch *= self.args.gradient_accumulation_steps
            else:
                steps_trained_in_current_epoch = 0
            logger.info("  Continuing training from checkpoint, will skip to saved global_step")
            logger.info(f"  Continuing training from epoch {epochs_trained}")
            logger.info(f"  Continuing training from global step {self.state.global_step}")
            if not self.args.ignore_data_skip:
                logger.info(
                    f"  Will skip the first {epochs_trained} epochs then the first {steps_trained_in_current_epoch} "
                    "batches in the first epoch."
                )
        # Update the references
        self.callback_handler.model = self.model
        self.callback_handler.optimizer = self.optimizer
        self.callback_handler.lr_scheduler = self.lr_scheduler
        self.callback_handler.train_dataloader = train_dataloader
        self.state.trial_name = self.hp_name(trial) if self.hp_name is not None else None
        self.state.trial_params = hp_params(trial) if trial is not None else None
        # This should be the same if the state has been saved but in case the training arguments changed, it's safer
        # to set this after the load.
        self.state.max_steps = max_steps
        self.state.num_train_epochs = num_train_epochs
        self.state.is_local_process_zero = self.is_local_process_zero()
        self.state.is_world_process_zero = self.is_world_process_zero()
        # tr_loss is a tensor to avoid synchronization of TPUs through .item()
        tr_loss = torch.tensor(0.0).to(self.args.device)
        # _total_loss_scalar is updated everytime .item() has to be called on tr_loss and stores the sum of all losses
        self._total_loss_scalar = 0.0
        self._globalstep_last_logged = self.state.global_step
        self._total_flos = self.state.total_flos
        model.zero_grad()
        self.control = self.callback_handler.on_train_begin(self.args, self.state, self.control)
        # Skip the first epochs_trained epochs to get the random state of the dataloader at the right point.
        if not self.args.ignore_data_skip:
            for epoch in range(epochs_trained):
                # We just need to begin an iteration to create the randomization of the sampler.
                for _ in train_dataloader:
                    break
        # ---- main training loop ----
        for epoch in range(epochs_trained, num_train_epochs):
            if isinstance(train_dataloader, DataLoader) and isinstance(train_dataloader.sampler, DistributedSampler):
                train_dataloader.sampler.set_epoch(epoch)
            if is_torch_tpu_available():
                parallel_loader = pl.ParallelLoader(train_dataloader, [self.args.device]).per_device_loader(
                    self.args.device
                )
                epoch_iterator = parallel_loader
            else:
                epoch_iterator = train_dataloader
            # Reset the past mems state at the beginning of each epoch if necessary.
            if self.args.past_index >= 0:
                self._past = None
            steps_in_epoch = (
                len(epoch_iterator)
                if train_dataset_is_sized
                else self.args.max_steps * self.args.gradient_accumulation_steps
            )
            self.control = self.callback_handler.on_epoch_begin(self.args, self.state, self.control)
            for step, inputs in enumerate(epoch_iterator):
                # Skip past any already trained steps if resuming training
                if steps_trained_in_current_epoch > 0:
                    steps_trained_in_current_epoch -= 1
                    continue
                if (step + 1) % self.args.gradient_accumulation_steps == 0:
                    self.control = self.callback_handler.on_step_begin(self.args, self.state, self.control)
                if (
                    ((step + 1) % self.args.gradient_accumulation_steps != 0)
                    and self.args.local_rank != -1
                    and self.args._no_sync_in_gradient_accumulation
                ):
                    # Avoid unnecessary DDP synchronization since there will be no backward pass on this example.
                    with model.no_sync():
                        tr_loss += self.training_step(model, inputs)
                else:
                    tr_loss += self.training_step(model, inputs)
                self._total_flos += float(self.floating_point_ops(inputs))
                # Optimizer step for deepspeed must be called on every step regardless of the value of gradient_accumulation_steps
                if self.deepspeed:
                    self.deepspeed.step()
                if (step + 1) % self.args.gradient_accumulation_steps == 0 or (
                    # last step in epoch but step is always smaller than gradient_accumulation_steps
                    steps_in_epoch <= self.args.gradient_accumulation_steps
                    and (step + 1) == steps_in_epoch
                ):
                    # Gradient clipping
                    if self.args.max_grad_norm is not None and self.args.max_grad_norm > 0 and not self.deepspeed:
                        # deepspeed does its own clipping
                        if self.use_amp:
                            # AMP: gradients need unscaling
                            self.scaler.unscale_(self.optimizer)
                        if hasattr(self.optimizer, "clip_grad_norm"):
                            # Some optimizers (like the sharded optimizer) have a specific way to do gradient clipping
                            self.optimizer.clip_grad_norm(self.args.max_grad_norm)
                        elif hasattr(model, "clip_grad_norm_"):
                            # Some models (like FullyShardedDDP) have a specific way to do gradient clipping
                            model.clip_grad_norm_(self.args.max_grad_norm)
                        else:
                            # Revert to normal clipping otherwise, handling Apex or full precision
                            torch.nn.utils.clip_grad_norm_(
                                amp.master_params(self.optimizer) if self.use_apex else model.parameters(),
                                self.args.max_grad_norm,
                            )
                    # Optimizer step
                    if self.deepspeed:
                        pass  # called outside the loop
                    elif is_torch_tpu_available():
                        xm.optimizer_step(self.optimizer)
                    elif self.use_amp:
                        self.scaler.step(self.optimizer)
                        self.scaler.update()
                    else:
                        self.optimizer.step()
                    if not self.deepspeed:
                        self.lr_scheduler.step()
                    model.zero_grad()
                    self.state.global_step += 1
                    self.state.epoch = epoch + (step + 1) / steps_in_epoch
                    self.control = self.callback_handler.on_step_end(self.args, self.state, self.control)
                    self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)
                if self.control.should_epoch_stop or self.control.should_training_stop:
                    break
            self.control = self.callback_handler.on_epoch_end(self.args, self.state, self.control)
            self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)
            if self.args.tpu_metrics_debug or self.args.debug:
                if is_torch_tpu_available():
                    # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
                    xm.master_print(met.metrics_report())
                else:
                    logger.warning(
                        "You enabled PyTorch/XLA debug metrics but you don't have a TPU "
                        "configured. Check your training configuration if this is unexpected."
                    )
            if self.control.should_training_stop:
                break
        if self.args.past_index and hasattr(self, "_past"):
            # Clean the state at the end of training
            delattr(self, "_past")
        logger.info("\n\nTraining completed. Do not forget to share your model on huggingface.co/models =)\n\n")
        if self.args.load_best_model_at_end and self.state.best_model_checkpoint is not None:
            logger.info(
                f"Loading best model from {self.state.best_model_checkpoint} (score: {self.state.best_metric})."
            )
            if isinstance(self.model, PreTrainedModel):
                self.model = self.model.from_pretrained(self.state.best_model_checkpoint)
                if self.place_model_on_device:
                    self.model = self.model.to(self.args.device)
            else:
                state_dict = torch.load(os.path.join(self.state.best_model_checkpoint, WEIGHTS_NAME))
                self.model.load_state_dict(state_dict)
            if self.deepspeed:
                self.deepspeed.load_checkpoint(
                    self.state.best_model_checkpoint, load_optimizer_states=False, load_lr_scheduler_states=False
                )
        metrics = speed_metrics("train", start_time, self.state.max_steps)
        if self._total_flos is not None:
            self.store_flos()
            metrics["total_flos"] = self.state.total_flos
        self.log(metrics)
        self.control = self.callback_handler.on_train_end(self.args, self.state, self.control)
        # add remaining tr_loss
        self._total_loss_scalar += tr_loss.item()
        if self.deepspeed:
            # free up any memory that might be useful for eval
            self.deepspeed = None
            self.optimizer = None
            self.lr_scheduler = None
            self.model_wrapped = self.model
            gc.collect()  # force memory release
            # to restore normal behavior outside of train replay the place_model_on_device logic w/o deepspeed
            self.place_model_on_device = self.args.place_model_on_device
            if self.is_model_parallel:
                self.place_model_on_device = False
        self.is_in_train = False
        self._memory_tracker.stop_and_update_metrics(metrics)
        return TrainOutput(self.state.global_step, self._total_loss_scalar / self.state.global_step, metrics)
def _maybe_log_save_evaluate(self, tr_loss, model, trial, epoch):
if self.control.should_log:
logs: Dict[str, float] = {}
tr_loss_scalar = tr_loss.item()
# reset tr_loss to zero
tr_loss -= tr_loss
logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
logs["learning_rate"] = self._get_learning_rate()
self._total_loss_scalar += tr_loss_scalar
self._globalstep_last_logged = self.state.global_step
self.log(logs)
metrics = None
if self.control.should_evaluate:
metrics = self.evaluate()
self._report_to_hp_search(trial, epoch, metrics)
if self.control.should_save:
self._save_checkpoint(model, trial, metrics=metrics)
self.control = self.callback_handler.on_save(self.args, self.state, self.control)
    def _save_checkpoint(self, model, trial, metrics=None):
        """
        Save a full training checkpoint (model, optimizer, scheduler, trainer state) under a
        ``checkpoint-<global_step>`` folder, update best-model bookkeeping, and rotate old checkpoints.
        Args:
            model: the (possibly wrapped) model being trained; the actual save goes through ``self.save_model``.
            trial: active hyperparameter-search trial, used to nest checkpoints in a per-run directory.
            metrics: latest evaluation metrics; when provided, used to track the best checkpoint.
        """
        # In all cases, including ddp/dp/deepspeed, self.model is always a reference to the model we
        # want to save except FullyShardedDDP.
        # assert unwrap_model(model) is self.model, "internal model should be a reference to self.model"
        # Save model checkpoint
        checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}"
        if self.hp_search_backend is not None and trial is not None:
            if self.hp_search_backend == HPSearchBackend.OPTUNA:
                run_id = trial.number
            else:
                from ray import tune
                run_id = tune.get_trial_id()
            run_name = self.hp_name(trial) if self.hp_name is not None else f"run-{run_id}"
            run_dir = os.path.join(self.args.output_dir, run_name)
        else:
            run_dir = self.args.output_dir
        self.store_flos()
        output_dir = os.path.join(run_dir, checkpoint_folder)
        self.save_model(output_dir)
        if self.deepspeed:
            self.deepspeed.save_checkpoint(output_dir)
        # Save optimizer and scheduler
        if self.sharded_ddp == ShardedDDPOption.SIMPLE:
            # Each shard holds only part of the optimizer state; gather it before saving.
            self.optimizer.consolidate_state_dict()
        if is_torch_tpu_available():
            xm.rendezvous("saving_optimizer_states")
            xm.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
            with warnings.catch_warnings(record=True) as caught_warnings:
                xm.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
                reissue_pt_warnings(caught_warnings)
        elif self.is_world_process_zero() and not self.deepspeed:
            # deepspeed.save_checkpoint above saves model/optim/sched
            torch.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
            with warnings.catch_warnings(record=True) as caught_warnings:
                torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
            reissue_pt_warnings(caught_warnings)
        # Determine the new best metric / best model checkpoint
        if metrics is not None and self.args.metric_for_best_model is not None:
            metric_to_check = self.args.metric_for_best_model
            if not metric_to_check.startswith("eval_"):
                metric_to_check = f"eval_{metric_to_check}"
            metric_value = metrics[metric_to_check]
            operator = np.greater if self.args.greater_is_better else np.less
            if (
                self.state.best_metric is None
                or self.state.best_model_checkpoint is None
                or operator(metric_value, self.state.best_metric)
            ):
                self.state.best_metric = metric_value
                self.state.best_model_checkpoint = output_dir
        # Save the Trainer state
        if self.is_world_process_zero():
            self.state.save_to_json(os.path.join(output_dir, "trainer_state.json"))
        # Maybe delete some older checkpoints.
        if self.is_world_process_zero():
            self._rotate_checkpoints(use_mtime=True, output_dir=run_dir)
def _load_optimizer_and_scheduler(self, checkpoint):
"""If optimizer and scheduler states exist, load them."""
if checkpoint is None:
return
if os.path.isfile(os.path.join(checkpoint, "optimizer.pt")) and os.path.isfile(
os.path.join(checkpoint, "scheduler.pt")
):
# Load in optimizer and scheduler states
if is_torch_tpu_available():
# On TPU we have to take some extra precautions to properly load the states on the right device.
optimizer_state = torch.load(os.path.join(checkpoint, "optimizer.pt"), map_location="cpu")
with warnings.catch_warnings(record=True) as caught_warnings:
lr_scheduler_state = torch.load(os.path.join(checkpoint, "scheduler.pt"), map_location="cpu")
reissue_pt_warnings(caught_warnings)
xm.send_cpu_data_to_device(optimizer_state, self.args.device)
xm.send_cpu_data_to_device(lr_scheduler_state, self.args.device)
self.optimizer.load_state_dict(optimizer_state)
self.lr_scheduler.load_state_dict(lr_scheduler_state)
else:
self.optimizer.load_state_dict(
torch.load(os.path.join(checkpoint, "optimizer.pt"), map_location=self.args.device)
)
with warnings.catch_warnings(record=True) as caught_warnings:
self.lr_scheduler.load_state_dict(torch.load(os.path.join(checkpoint, "scheduler.pt")))
reissue_pt_warnings(caught_warnings)
if self.deepspeed:
# Not sure how to check if there is a saved deepspeed checkpoint, but since it just return None if it fails to find a deepspeed checkpoint this is sort of a check-n-load function
self.deepspeed.load_checkpoint(checkpoint, load_optimizer_states=True, load_lr_scheduler_states=True)
def hyperparameter_search(
self,
hp_space: Optional[Callable[["optuna.Trial"], Dict[str, float]]] = None,
compute_objective: Optional[Callable[[Dict[str, float]], float]] = None,
n_trials: int = 20,
direction: str = "minimize",
backend: Optional[Union["str", HPSearchBackend]] = None,
hp_name: Optional[Callable[["optuna.Trial"], str]] = None,
**kwargs,
) -> BestRun:
"""
Launch an hyperparameter search using ``optuna`` or ``Ray Tune``. The optimized quantity is determined by
:obj:`compute_objective`, which defaults to a function returning the evaluation loss when no metric is
provided, the sum of all metrics otherwise.
.. warning::
To use this method, you need to have provided a ``model_init`` when initializing your
:class:`~transformers.Trainer`: we need to reinitialize the model at each new run. This is incompatible
with the ``optimizers`` argument, so you need to subclass :class:`~transformers.Trainer` and override the
method :meth:`~transformers.Trainer.create_optimizer_and_scheduler` for custom optimizer/scheduler.
Args:
hp_space (:obj:`Callable[["optuna.Trial"], Dict[str, float]]`, `optional`):
A function that defines the hyperparameter search space. Will default to
:func:`~transformers.trainer_utils.default_hp_space_optuna` or
:func:`~transformers.trainer_utils.default_hp_space_ray` depending on your backend.
compute_objective (:obj:`Callable[[Dict[str, float]], float]`, `optional`):
A function computing the objective to minimize or maximize from the metrics returned by the
:obj:`evaluate` method. Will default to :func:`~transformers.trainer_utils.default_compute_objective`.
n_trials (:obj:`int`, `optional`, defaults to 100):
The number of trial runs to test.
direction(:obj:`str`, `optional`, defaults to :obj:`"minimize"`):
Whether to optimize greater or lower objects. Can be :obj:`"minimize"` or :obj:`"maximize"`, you should
pick :obj:`"minimize"` when optimizing the validation loss, :obj:`"maximize"` when optimizing one or
several metrics.
backend(:obj:`str` or :class:`~transformers.training_utils.HPSearchBackend`, `optional`):
The backend to use for hyperparameter search. Will default to optuna or Ray Tune, depending on which
one is installed. If both are installed, will default to optuna.
kwargs:
Additional keyword arguments passed along to :obj:`optuna.create_study` or :obj:`ray.tune.run`. For
more information see:
- the documentation of `optuna.create_study
<https://optuna.readthedocs.io/en/stable/reference/generated/optuna.study.create_study.html>`__
- the documentation of `tune.run
<https://docs.ray.io/en/latest/tune/api_docs/execution.html#tune-run>`__
Returns:
:class:`transformers.trainer_utils.BestRun`: All the information about the best run.
"""
if backend is None:
backend = default_hp_search_backend()
if backend is None:
raise RuntimeError(
"At least one of optuna or ray should be installed. "
"To install optuna run `pip install optuna`."
"To install ray run `pip install ray[tune]`."
)
backend = HPSearchBackend(backend)
if backend == HPSearchBackend.OPTUNA and not is_optuna_available():
raise RuntimeError("You picked the optuna backend, but it is not installed. Use `pip install optuna`.")
if backend == HPSearchBackend.RAY and not is_ray_tune_available():
raise RuntimeError(
"You picked the Ray Tune backend, but it is not installed. Use `pip install 'ray[tune]'`."
)
self.hp_search_backend = backend
if self.model_init is None:
raise RuntimeError(
"To use hyperparameter search, you need to pass your model through a model_init function."
)
self.hp_space = default_hp_space[backend] if hp_space is None else hp_space
self.hp_name = hp_name
self.compute_objective = default_compute_objective if compute_objective is None else compute_objective
run_hp_search = run_hp_search_optuna if backend == HPSearchBackend.OPTUNA else run_hp_search_ray
best_run = run_hp_search(self, n_trials, direction, **kwargs)
self.hp_search_backend = None
return best_run
def log(self, logs: Dict[str, float]) -> None:
"""
Log :obj:`logs` on the various objects watching training.
Subclass and override this method to inject custom behavior.
Args:
logs (:obj:`Dict[str, float]`):
The values to log.
"""
if self.state.epoch is not None:
logs["epoch"] = round(self.state.epoch, 2)
output = {**logs, **{"step": self.state.global_step}}
self.state.log_history.append(output)
self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
def _prepare_inputs(self, inputs: Dict[str, Union[torch.Tensor, Any]]) -> Dict[str, Union[torch.Tensor, Any]]:
"""
Prepare :obj:`inputs` before feeding them to the model, converting them to tensors if they are not already and
handling potential state.
"""
for k, v in inputs.items():
if isinstance(v, torch.Tensor):
inputs[k] = v.to(self.args.device)
if self.args.past_index >= 0 and self._past is not None:
inputs["mems"] = self._past
return inputs
    def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor:
        """
        Perform a training step on a batch of inputs.

        Subclass and override to inject custom behavior.

        Args:
            model (:obj:`nn.Module`):
                The model to train.
            inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
                The inputs and targets of the model.

                The dictionary will be unpacked before being fed to the model. Most models expect the targets under the
                argument :obj:`labels`. Check your model's documentation for all accepted arguments.

        Return:
            :obj:`torch.Tensor`: The tensor with training loss on this batch.
        """
        model.train()
        inputs = self._prepare_inputs(inputs)
        # Forward pass, under `autocast` when native AMP (fp16) is enabled.
        if self.use_amp:
            with autocast():
                loss = self.compute_loss(model, inputs)
        else:
            loss = self.compute_loss(model, inputs)
        if self.args.n_gpu > 1:
            loss = loss.mean()  # mean() to average on multi-gpu parallel training
        if self.args.gradient_accumulation_steps > 1 and not self.deepspeed:
            # deepspeed handles loss scaling by gradient_accumulation_steps in its `backward`
            loss = loss / self.args.gradient_accumulation_steps
        # Backward pass: exactly one of the four paths below runs, depending on the precision/distributed
        # backend in use (native AMP grad scaler, apex, deepspeed, or plain backward).
        if self.use_amp:
            self.scaler.scale(loss).backward()
        elif self.use_apex:
            with amp.scale_loss(loss, self.optimizer) as scaled_loss:
                scaled_loss.backward()
        elif self.deepspeed:
            # loss gets scaled under gradient_accumulation_steps in deepspeed
            loss = self.deepspeed.backward(loss)
        else:
            loss.backward()
        # Detach so callers accumulate a value, not a graph.
        return loss.detach()
def compute_loss(self, model, inputs, return_outputs=False):
"""
How the loss is computed by Trainer. By default, all models return the loss in the first element.
Subclass and override for custom behavior.
"""
if self.label_smoother is not None and "labels" in inputs:
labels = inputs.pop("labels")
else:
labels = None
outputs = model(**inputs)
# Save past state if it exists
# TODO: this needs to be fixed and made cleaner later.
if self.args.past_index >= 0:
self._past = outputs[self.args.past_index]
if labels is not None:
loss = self.label_smoother(outputs, labels)
else:
# We don't use .loss here since the model may return tuples instead of ModelOutput.
loss = outputs["loss"] if isinstance(outputs, dict) else outputs[0]
return (loss, outputs) if return_outputs else loss
def is_local_process_zero(self) -> bool:
"""
Whether or not this process is the local (e.g., on one machine if training in a distributed fashion on several
machines) main process.
"""
if is_torch_tpu_available():
return xm.is_master_ordinal(local=True)
else:
return self.args.local_rank in [-1, 0]
def is_world_process_zero(self) -> bool:
"""
Whether or not this process is the global main process (when training in a distributed fashion on several
machines, this is only going to be :obj:`True` for one process).
"""
if is_torch_tpu_available():
return xm.is_master_ordinal(local=False)
else:
return self.args.local_rank == -1 or dist.get_rank() == 0
def save_model(self, output_dir: Optional[str] = None):
"""
Will save the model, so you can reload it using :obj:`from_pretrained()`.
Will only save from the main process.
"""
if is_torch_tpu_available():
self._save_tpu(output_dir)
else:
if self.is_world_process_zero():
self._save(output_dir)
if self.args.local_rank != -1:
dist.barrier()
    def _save_tpu(self, output_dir: Optional[str] = None):
        """Save model/tokenizer/training args from a TPU process; only the master ordinal writes config/args."""
        output_dir = output_dir if output_dir is not None else self.args.output_dir
        logger.info("Saving model checkpoint to %s", output_dir)
        if xm.is_master_ordinal():
            os.makedirs(output_dir, exist_ok=True)
            torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
        # Save a trained model and configuration using `save_pretrained()`.
        # They can then be reloaded using `from_pretrained()`
        # All TPU processes must reach this rendezvous before the checkpoint is written.
        xm.rendezvous("saving_checkpoint")
        if not isinstance(self.model, PreTrainedModel):
            if isinstance(unwrap_model(self.model), PreTrainedModel):
                # The wrapper is not a `PreTrainedModel` but the wrapped model is: save through the unwrapped model,
                # using the wrapper's state dict and `xm.save` as the save function.
                unwrap_model(self.model).save_pretrained(
                    output_dir,
                    save_config=self.is_world_process_zero(),
                    state_dict=self.model.state_dict(),
                    save_function=xm.save,
                )
            else:
                logger.info("Trainer.model is not a `PreTrainedModel`, only saving its state dict.")
                state_dict = self.model.state_dict()
                xm.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
        else:
            self.model.save_pretrained(output_dir, save_config=self.is_world_process_zero(), save_function=xm.save)
        # Tokenizer files are written once, by the global main process.
        if self.tokenizer is not None and self.is_world_process_zero():
            self.tokenizer.save_pretrained(output_dir)
def _save(self, output_dir: Optional[str] = None):
# If we are executing this function, we are the process zero, so we don't check for that.
output_dir = output_dir if output_dir is not None else self.args.output_dir
os.makedirs(output_dir, exist_ok=True)
logger.info("Saving model checkpoint to %s", output_dir)
# Save a trained model and configuration using `save_pretrained()`.
# They can then be reloaded using `from_pretrained()`
if not isinstance(self.model, PreTrainedModel):
if isinstance(unwrap_model(self.model), PreTrainedModel):
unwrap_model(self.model).save_pretrained(output_dir, state_dict=self.model.state_dict())
else:
logger.info("Trainer.model is not a `PreTrainedModel`, only saving its state dict.")
state_dict = self.model.state_dict()
torch.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
else:
self.model.save_pretrained(output_dir)
if self.tokenizer is not None:
self.tokenizer.save_pretrained(output_dir)
# Good practice: save your training arguments together with the trained model
torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
def store_flos(self):
# Storing the number of floating-point operations that went into the model
if self._total_flos is not None:
if self.args.local_rank != -1:
self.state.total_flos = distributed_broadcast_scalars([self._total_flos]).sum().item()
else:
self.state.total_flos = self._total_flos
def _sorted_checkpoints(
self, output_dir=None, checkpoint_prefix=PREFIX_CHECKPOINT_DIR, use_mtime=False
) -> List[str]:
ordering_and_checkpoint_path = []
glob_checkpoints = [str(x) for x in Path(output_dir).glob(f"{checkpoint_prefix}-*")]
for path in glob_checkpoints:
if use_mtime:
ordering_and_checkpoint_path.append((os.path.getmtime(path), path))
else:
regex_match = re.match(f".*{checkpoint_prefix}-([0-9]+)", path)
if regex_match and regex_match.groups():
ordering_and_checkpoint_path.append((int(regex_match.groups()[0]), path))
checkpoints_sorted = sorted(ordering_and_checkpoint_path)
checkpoints_sorted = [checkpoint[1] for checkpoint in checkpoints_sorted]
# Make sure we don't delete the best model.
if self.state.best_model_checkpoint is not None:
best_model_index = checkpoints_sorted.index(str(Path(self.state.best_model_checkpoint)))
checkpoints_sorted[best_model_index], checkpoints_sorted[-1] = (
checkpoints_sorted[-1],
checkpoints_sorted[best_model_index],
)
return checkpoints_sorted
def _rotate_checkpoints(self, use_mtime=False, output_dir=None) -> None:
if self.args.save_total_limit is None or self.args.save_total_limit <= 0:
return
# Check if we should delete older checkpoint(s)
checkpoints_sorted = self._sorted_checkpoints(use_mtime=use_mtime, output_dir=output_dir)
if len(checkpoints_sorted) <= self.args.save_total_limit:
return
number_of_checkpoints_to_delete = max(0, len(checkpoints_sorted) - self.args.save_total_limit)
checkpoints_to_be_deleted = checkpoints_sorted[:number_of_checkpoints_to_delete]
for checkpoint in checkpoints_to_be_deleted:
logger.info("Deleting older checkpoint [{}] due to args.save_total_limit".format(checkpoint))
shutil.rmtree(checkpoint)
    def evaluate(
        self,
        eval_dataset: Optional[Dataset] = None,
        ignore_keys: Optional[List[str]] = None,
        metric_key_prefix: str = "eval",
    ) -> Dict[str, float]:
        """
        Run evaluation and returns metrics.

        The calling script will be responsible for providing a method to compute metrics, as they are task-dependent
        (pass it to the init :obj:`compute_metrics` argument).

        You can also subclass and override this method to inject custom behavior.

        Args:
            eval_dataset (:obj:`Dataset`, `optional`):
                Pass a dataset if you wish to override :obj:`self.eval_dataset`. If it is an :obj:`datasets.Dataset`,
                columns not accepted by the ``model.forward()`` method are automatically removed. It must implement the
                :obj:`__len__` method.
            ignore_keys (:obj:`Lst[str]`, `optional`):
                A list of keys in the output of your model (if it is a dictionary) that should be ignored when
                gathering predictions.
            metric_key_prefix (:obj:`str`, `optional`, defaults to :obj:`"eval"`):
                An optional prefix to be used as the metrics key prefix. For example the metrics "bleu" will be named
                "eval_bleu" if the prefix is "eval" (default)

        Returns:
            A dictionary containing the evaluation loss and the potential metrics computed from the predictions. The
            dictionary also contains the epoch number which comes from the training state.
        """
        # memory metrics - must set up as early as possible
        self._memory_tracker.start()
        if eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
            raise ValueError("eval_dataset must implement __len__")
        eval_dataloader = self.get_eval_dataloader(eval_dataset)
        # Time the whole prediction loop so throughput metrics can be reported below.
        start_time = time.time()
        output = self.prediction_loop(
            eval_dataloader,
            description="Evaluation",
            # No point gathering the predictions if there are no metrics, otherwise we defer to
            # self.args.prediction_loss_only
            prediction_loss_only=True if self.compute_metrics is None else None,
            ignore_keys=ignore_keys,
            metric_key_prefix=metric_key_prefix,
        )
        # Sample count comes from the explicit dataset if given, else the trainer's default eval dataset.
        n_samples = len(eval_dataset if eval_dataset is not None else self.eval_dataset)
        output.metrics.update(speed_metrics(metric_key_prefix, start_time, n_samples))
        self.log(output.metrics)
        if self.args.tpu_metrics_debug or self.args.debug:
            # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
            xm.master_print(met.metrics_report())
        self.control = self.callback_handler.on_evaluate(self.args, self.state, self.control, output.metrics)
        self._memory_tracker.stop_and_update_metrics(output.metrics)
        return output.metrics
    def predict(
        self, test_dataset: Dataset, ignore_keys: Optional[List[str]] = None, metric_key_prefix: str = "eval"
    ) -> PredictionOutput:
        """
        Run prediction and returns predictions and potential metrics.

        Depending on the dataset and your use case, your test dataset may contain labels. In that case, this method
        will also return metrics, like in :obj:`evaluate()`.

        Args:
            test_dataset (:obj:`Dataset`):
                Dataset to run the predictions on. If it is an :obj:`datasets.Dataset`, columns not accepted by the
                ``model.forward()`` method are automatically removed. Has to implement the method :obj:`__len__`
            ignore_keys (:obj:`Lst[str]`, `optional`):
                A list of keys in the output of your model (if it is a dictionary) that should be ignored when
                gathering predictions.
            metric_key_prefix (:obj:`str`, `optional`, defaults to :obj:`"eval"`):
                An optional prefix to be used as the metrics key prefix. For example the metrics "bleu" will be named
                "eval_bleu" if the prefix is "eval" (default)

        .. note::

            If your predictions or labels have different sequence length (for instance because you're doing dynamic
            padding in a token classification task) the predictions will be padded (on the right) to allow for
            concatenation into one array. The padding index is -100.

        Returns: `NamedTuple` A namedtuple with the following keys:

            - predictions (:obj:`np.ndarray`): The predictions on :obj:`test_dataset`.
            - label_ids (:obj:`np.ndarray`, `optional`): The labels (if the dataset contained some).
            - metrics (:obj:`Dict[str, float]`, `optional`): The potential dictionary of metrics (if the dataset
              contained labels).
        """
        # memory metrics - must set up as early as possible
        self._memory_tracker.start()
        if test_dataset is not None and not isinstance(test_dataset, collections.abc.Sized):
            raise ValueError("test_dataset must implement __len__")
        test_dataloader = self.get_test_dataloader(test_dataset)
        # Time the loop so throughput metrics can be added to the output below.
        start_time = time.time()
        output = self.prediction_loop(
            test_dataloader, description="Prediction", ignore_keys=ignore_keys, metric_key_prefix=metric_key_prefix
        )
        output.metrics.update(speed_metrics(metric_key_prefix, start_time, len(test_dataset)))
        self._memory_tracker.stop_and_update_metrics(output.metrics)
        return output
    def prediction_loop(
        self,
        dataloader: DataLoader,
        description: str,
        prediction_loss_only: Optional[bool] = None,
        ignore_keys: Optional[List[str]] = None,
        metric_key_prefix: str = "eval",
    ) -> PredictionOutput:
        """
        Prediction/evaluation loop, shared by :obj:`Trainer.evaluate()` and :obj:`Trainer.predict()`.

        Works both with or without labels.
        """
        if not isinstance(dataloader.dataset, collections.abc.Sized):
            raise ValueError("dataset must implement __len__")
        prediction_loss_only = (
            prediction_loss_only if prediction_loss_only is not None else self.args.prediction_loss_only
        )
        if self.args.deepspeed and not self.args.do_train:
            # no harm, but flagging to the user that deepspeed config is ignored for eval
            # flagging only for when --do_train wasn't passed as only then it's redundant
            logger.info("Detected the deepspeed argument but it will not be used for evaluation")
        model = self._wrap_model(self.model, training=False)
        # if full fp16 is wanted on eval and this ``evaluation`` or ``predict`` isn't called while
        # ``train`` is running, half it first and then put on device
        if not self.is_in_train and self.args.fp16_full_eval:
            model = model.half().to(self.args.device)
        batch_size = dataloader.batch_size
        num_examples = self.num_examples(dataloader)
        logger.info("***** Running %s *****", description)
        logger.info(" Num examples = %d", num_examples)
        logger.info(" Batch size = %d", batch_size)
        # Per-device accumulators, flushed into the gatherers every eval_accumulation_steps.
        losses_host: torch.Tensor = None
        preds_host: Union[torch.Tensor, List[torch.Tensor]] = None
        labels_host: Union[torch.Tensor, List[torch.Tensor]] = None
        world_size = 1
        if is_torch_tpu_available():
            world_size = xm.xrt_world_size()
        elif self.args.local_rank != -1:
            world_size = dist.get_world_size()
        world_size = max(1, world_size)
        eval_losses_gatherer = DistributedTensorGatherer(world_size, num_examples, make_multiple_of=batch_size)
        if not prediction_loss_only:
            preds_gatherer = DistributedTensorGatherer(world_size, num_examples)
            labels_gatherer = DistributedTensorGatherer(world_size, num_examples)
        model.eval()
        if is_torch_tpu_available():
            dataloader = pl.ParallelLoader(dataloader, [self.args.device]).per_device_loader(self.args.device)
        if self.args.past_index >= 0:
            self._past = None
        self.callback_handler.eval_dataloader = dataloader
        for step, inputs in enumerate(dataloader):
            loss, logits, labels = self.prediction_step(model, inputs, prediction_loss_only, ignore_keys=ignore_keys)
            if loss is not None:
                # The per-batch loss is repeated batch_size times so the gatherer sees one value per example.
                losses = loss.repeat(batch_size)
                losses_host = losses if losses_host is None else torch.cat((losses_host, losses), dim=0)
            if logits is not None:
                preds_host = logits if preds_host is None else nested_concat(preds_host, logits, padding_index=-100)
            if labels is not None:
                labels_host = labels if labels_host is None else nested_concat(labels_host, labels, padding_index=-100)
            self.control = self.callback_handler.on_prediction_step(self.args, self.state, self.control)
            # Gather all tensors and put them back on the CPU if we have done enough accumulation steps.
            if self.args.eval_accumulation_steps is not None and (step + 1) % self.args.eval_accumulation_steps == 0:
                eval_losses_gatherer.add_arrays(self._gather_and_numpify(losses_host, "eval_losses"))
                if not prediction_loss_only:
                    preds_gatherer.add_arrays(self._gather_and_numpify(preds_host, "eval_preds"))
                    labels_gatherer.add_arrays(self._gather_and_numpify(labels_host, "eval_label_ids"))
                # Set back to None to begin a new accumulation
                losses_host, preds_host, labels_host = None, None, None
        if self.args.past_index and hasattr(self, "_past"):
            # Clean the state at the end of the evaluation loop
            delattr(self, "_past")
        # Gather all remaining tensors and put them back on the CPU
        eval_losses_gatherer.add_arrays(self._gather_and_numpify(losses_host, "eval_losses"))
        if not prediction_loss_only:
            preds_gatherer.add_arrays(self._gather_and_numpify(preds_host, "eval_preds"))
            labels_gatherer.add_arrays(self._gather_and_numpify(labels_host, "eval_label_ids"))
        eval_loss = eval_losses_gatherer.finalize()
        preds = preds_gatherer.finalize() if not prediction_loss_only else None
        label_ids = labels_gatherer.finalize() if not prediction_loss_only else None
        # Metrics are only computed when predictions and labels were both gathered.
        if self.compute_metrics is not None and preds is not None and label_ids is not None:
            metrics = self.compute_metrics(EvalPrediction(predictions=preds, label_ids=label_ids))
        else:
            metrics = {}
        if eval_loss is not None:
            metrics[f"{metric_key_prefix}_loss"] = eval_loss.mean().item()
        # Prefix all keys with metric_key_prefix + '_'
        for key in list(metrics.keys()):
            if not key.startswith(f"{metric_key_prefix}_"):
                metrics[f"{metric_key_prefix}_{key}"] = metrics.pop(key)
        return PredictionOutput(predictions=preds, label_ids=label_ids, metrics=metrics)
def _gather_and_numpify(self, tensors, name):
"""
Gather value of `tensors` (tensor or list/tuple of nested tensors) and convert them to numpy before
concatenating them to `gathered`
"""
if tensors is None:
return
if is_torch_tpu_available():
tensors = nested_xla_mesh_reduce(tensors, name)
elif self.args.local_rank != -1:
tensors = distributed_concat(tensors)
return nested_numpify(tensors)
    def prediction_step(
        self,
        model: nn.Module,
        inputs: Dict[str, Union[torch.Tensor, Any]],
        prediction_loss_only: bool,
        ignore_keys: Optional[List[str]] = None,
    ) -> Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]:
        """
        Perform an evaluation step on :obj:`model` using obj:`inputs`.

        Subclass and override to inject custom behavior.

        Args:
            model (:obj:`nn.Module`):
                The model to evaluate.
            inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
                The inputs and targets of the model.

                The dictionary will be unpacked before being fed to the model. Most models expect the targets under the
                argument :obj:`labels`. Check your model's documentation for all accepted arguments.
            prediction_loss_only (:obj:`bool`):
                Whether or not to return the loss only.
            ignore_keys (:obj:`Lst[str]`, `optional`):
                A list of keys in the output of your model (if it is a dictionary) that should be ignored when
                gathering predictions.

        Return:
            Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]: A tuple with the loss, logits and
            labels (each being optional).
        """
        # The batch only "has labels" when every configured label column is present and non-None.
        has_labels = all(inputs.get(k) is not None for k in self.label_names)
        inputs = self._prepare_inputs(inputs)
        if ignore_keys is None:
            if hasattr(self.model, "config"):
                ignore_keys = getattr(self.model.config, "keys_to_ignore_at_inference", [])
            else:
                ignore_keys = []
        # labels may be popped when computing the loss (label smoothing for instance) so we grab them first.
        if has_labels:
            labels = nested_detach(tuple(inputs.get(name) for name in self.label_names))
            if len(labels) == 1:
                labels = labels[0]
        else:
            labels = None
        with torch.no_grad():
            if has_labels:
                loss, outputs = self.compute_loss(model, inputs, return_outputs=True)
                loss = loss.mean().detach()
                if isinstance(outputs, dict):
                    logits = tuple(v for k, v in outputs.items() if k not in ignore_keys + ["loss"])
                else:
                    # Tuple outputs: position 0 is the loss (see compute_loss), the rest are the logits & co.
                    logits = outputs[1:]
            else:
                loss = None
                if self.use_amp:
                    with autocast():
                        outputs = model(**inputs)
                else:
                    outputs = model(**inputs)
                if isinstance(outputs, dict):
                    logits = tuple(v for k, v in outputs.items() if k not in ignore_keys)
                else:
                    logits = outputs
                # TODO: this needs to be fixed and made cleaner later.
                if self.args.past_index >= 0:
                    self._past = outputs[self.args.past_index - 1]
        if prediction_loss_only:
            return (loss, None, None)
        logits = nested_detach(logits)
        if len(logits) == 1:
            logits = logits[0]
        return (loss, logits, labels)
def floating_point_ops(self, inputs: Dict[str, Union[torch.Tensor, Any]]):
"""
For models that inherit from :class:`~transformers.PreTrainedModel`, uses that method to compute the number of
floating point operations for every backward + forward pass. If using another model, either implement such a
method in the model or subclass and override this method.
Args:
inputs (:obj:`Dict[str, Union[torch.Tensor, Any]]`):
The inputs and targets of the model.
Returns:
:obj:`int`: The number of floating-point operations.
"""
if hasattr(self.model, "floating_point_ops"):
return self.model.floating_point_ops(inputs)
else:
return 0
| 47.261634 | 190 | 0.641941 |
import collections
import gc
import inspect
import math
import os
import re
import shutil
import time
import warnings
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
from .integrations import (
default_hp_search_backend,
get_reporting_integration_callbacks,
hp_params,
is_fairscale_available,
is_optuna_available,
is_ray_tune_available,
run_hp_search_optuna,
run_hp_search_ray,
init_deepspeed,
)
import numpy as np
import torch
from packaging import version
from torch import nn
from torch.utils.data.dataloader import DataLoader
from torch.utils.data.dataset import Dataset
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import RandomSampler, SequentialSampler
from .data.data_collator import DataCollator, DataCollatorWithPadding, default_data_collator
from .file_utils import (
WEIGHTS_NAME,
is_apex_available,
is_datasets_available,
is_in_notebook,
is_sagemaker_distributed_available,
is_torch_tpu_available,
)
from .modeling_utils import PreTrainedModel, unwrap_model
from .optimization import Adafactor, AdamW, get_scheduler
from .tokenization_utils_base import PreTrainedTokenizerBase
from .trainer_callback import (
CallbackHandler,
DefaultFlowCallback,
PrinterCallback,
ProgressCallback,
TrainerCallback,
TrainerControl,
TrainerState,
)
from .trainer_pt_utils import (
DistributedLengthGroupedSampler,
DistributedTensorGatherer,
LabelSmoother,
LengthGroupedSampler,
SequentialDistributedSampler,
distributed_broadcast_scalars,
distributed_concat,
nested_concat,
nested_detach,
nested_numpify,
nested_xla_mesh_reduce,
reissue_pt_warnings,
)
from .trainer_utils import (
PREFIX_CHECKPOINT_DIR,
BestRun,
EvalPrediction,
HPSearchBackend,
PredictionOutput,
ShardedDDPOption,
TrainerMemoryTracker,
TrainOutput,
default_compute_objective,
default_hp_space,
get_last_checkpoint,
set_seed,
speed_metrics,
)
from .training_args import ParallelMode, TrainingArguments
from .utils import logging
from .utils.modeling_auto_mapping import MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES
# Module-level feature detection: optional backends are imported once here and the
# flags/aliases below are used throughout this file.
_is_native_amp_available = False
DEFAULT_CALLBACKS = [DefaultFlowCallback]
DEFAULT_PROGRESS_CALLBACK = ProgressCallback
# In notebooks, swap the progress callback for the notebook-friendly variant.
if is_in_notebook():
    from .utils.notebook import NotebookProgressCallback
    DEFAULT_PROGRESS_CALLBACK = NotebookProgressCallback
if is_apex_available():
    from apex import amp
# Native AMP (`torch.cuda.amp`) requires torch >= 1.6.
if version.parse(torch.__version__) >= version.parse("1.6"):
    _is_native_amp_available = True
    from torch.cuda.amp import autocast
if is_datasets_available():
    import datasets
if is_torch_tpu_available():
    import torch_xla.core.xla_model as xm
    import torch_xla.debug.metrics as met
    import torch_xla.distributed.parallel_loader as pl
if is_fairscale_available():
    import fairscale
    from fairscale.nn.data_parallel import ShardedDataParallel as ShardedDDP
    from fairscale.optim import OSS
    from fairscale.optim.grad_scaler import ShardedGradScaler
    # Fully sharded DDP needs fairscale >= 0.3; `None` marks it as unavailable.
    if version.parse(fairscale.__version__) >= version.parse("0.3"):
        from fairscale.nn.data_parallel import FullyShardedDataParallel as FullyShardedDDP
    else:
        FullyShardedDDP = None
# `dist` resolves to the SageMaker distributed backend when present, else plain torch.distributed.
if is_sagemaker_distributed_available():
    import smdistributed.dataparallel.torch.distributed as dist
    from smdistributed.dataparallel.torch.parallel.distributed import DistributedDataParallel as DDP
else:
    import torch.distributed as dist
if TYPE_CHECKING:
    import optuna
logger = logging.get_logger(__name__)
class Trainer:
from .trainer_pt_utils import _get_learning_rate, log_metrics, metrics_format, save_metrics, save_state
    def __init__(
        self,
        model: Union[PreTrainedModel, torch.nn.Module] = None,
        args: TrainingArguments = None,
        data_collator: Optional[DataCollator] = None,
        train_dataset: Optional[Dataset] = None,
        eval_dataset: Optional[Dataset] = None,
        tokenizer: Optional["PreTrainedTokenizerBase"] = None,
        model_init: Callable[[], PreTrainedModel] = None,
        compute_metrics: Optional[Callable[[EvalPrediction], Dict]] = None,
        callbacks: Optional[List[TrainerCallback]] = None,
        optimizers: Tuple[torch.optim.Optimizer, torch.optim.lr_scheduler.LambdaLR] = (None, None),
    ):
        """
        Set up the trainer: resolve the model, device placement, mixed precision, sharded DDP, data collation,
        callbacks and bookkeeping state. See the class documentation for the meaning of each argument.
        """
        # Fall back to default `TrainingArguments` writing to ./tmp_trainer when none are given.
        if args is None:
            output_dir = "tmp_trainer"
            logger.info(f"No `TrainingArguments` passed, using `output_dir={output_dir}`.")
            args = TrainingArguments(output_dir=output_dir)
        self.args = args
        # Seed before any model instantiation (model_init may be called just below).
        set_seed(self.args.seed)
        self.hp_name = None
        self.deepspeed = None
        self.is_in_train = False
        # Memory metrics - must set up as early as possible so __init__ memory usage is tracked.
        self._memory_tracker = TrainerMemoryTracker(self.args.skip_memory_metrics)
        self._memory_tracker.start()
        # Force device / distributed setup initialization explicitly.
        args._setup_devices
        if model is None:
            if model_init is not None:
                self.model_init = model_init
                model = self.call_model_init()
            else:
                raise RuntimeError("`Trainer` requires either a `model` or `model_init` argument")
        else:
            if model_init is not None:
                warnings.warn(
                    "`Trainer` requires either a `model` or `model_init` argument, but not both. "
                    "`model_init` will overwrite your model when calling the `train` method. This will become a fatal error in the next release.",
                    FutureWarning,
                )
            self.model_init = model_init
        # Model parallelism is only detected via the model's own flags.
        if hasattr(model, "is_parallelizable") and model.is_parallelizable and model.model_parallel:
            self.is_model_parallel = True
        else:
            self.is_model_parallel = False
        # Resolve the sharded DDP mode (mutually exclusive with deepspeed, requires distributed + fairscale).
        self.sharded_ddp = None
        if len(args.sharded_ddp) > 0:
            if args.deepspeed:
                raise ValueError(
                    "Using --sharded_ddp xxx together with --deepspeed is not possible, deactivate one of those flags."
                )
            if args.local_rank == -1:
                raise ValueError("Using sharded DDP only works in distributed training.")
            elif not is_fairscale_available():
                raise ImportError("Sharded DDP training requires fairscale: `pip install fairscale`.")
            elif ShardedDDPOption.SIMPLE not in args.sharded_ddp and FullyShardedDDP is None:
                raise ImportError(
                    "Sharded DDP in a mode other than simple training requires fairscale version >= 0.3, found "
                    f"{fairscale.__version__}. Upgrade your fairscale library: `pip install --upgrade fairscale`."
                )
            elif ShardedDDPOption.SIMPLE in args.sharded_ddp:
                self.sharded_ddp = ShardedDDPOption.SIMPLE
            elif ShardedDDPOption.ZERO_DP_2 in args.sharded_ddp:
                self.sharded_ddp = ShardedDDPOption.ZERO_DP_2
            elif ShardedDDPOption.ZERO_DP_3 in args.sharded_ddp:
                self.sharded_ddp = ShardedDDPOption.ZERO_DP_3
        # The model is kept off-device when model-parallel, trained under deepspeed, run in full fp16 eval
        # (halved first), or sharded by ZeRO-2/3.
        self.place_model_on_device = args.place_model_on_device
        if (
            self.is_model_parallel
            or (args.deepspeed and args.do_train)
            or (args.fp16_full_eval and not args.do_train)
            or (self.sharded_ddp in [ShardedDDPOption.ZERO_DP_2, ShardedDDPOption.ZERO_DP_3])
        ):
            self.place_model_on_device = False
        default_collator = default_data_collator if tokenizer is None else DataCollatorWithPadding(tokenizer)
        self.data_collator = data_collator if data_collator is not None else default_collator
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.tokenizer = tokenizer
        # and we only use deepspeed for training at the moment
        if self.place_model_on_device:
            model = model.to(args.device)
        # Force n_gpu to 1 to avoid DataParallel as MP will manage the GPUs
        if self.is_model_parallel:
            self.args._n_gpu = 1
        # later use `self.model is self.model_wrapped` to check if it's wrapped or not
        self.model_wrapped = model
        self.model = model
        self.compute_metrics = compute_metrics
        self.optimizer, self.lr_scheduler = optimizers
        if model_init is not None and (self.optimizer is not None or self.lr_scheduler is not None):
            # NOTE(review): the two concatenated message strings below lack a separating space.
            raise RuntimeError(
                "Passing a `model_init` is incompatible with providing the `optimizers` argument."
                "You should subclass `Trainer` and override the `create_optimizer_and_scheduler` method."
            )
        default_callbacks = DEFAULT_CALLBACKS + get_reporting_integration_callbacks(self.args.report_to)
        callbacks = default_callbacks if callbacks is None else default_callbacks + callbacks
        self.callback_handler = CallbackHandler(
            callbacks, self.model, self.tokenizer, self.optimizer, self.lr_scheduler
        )
        self.add_callback(PrinterCallback if self.args.disable_tqdm else DEFAULT_PROGRESS_CALLBACK)
        self._loggers_initialized = False
        # Create the output directory once, from the global main process only.
        if self.is_world_process_zero():
            os.makedirs(self.args.output_dir, exist_ok=True)
        if not callable(self.data_collator) and callable(getattr(self.data_collator, "collate_batch", None)):
            raise ValueError("The `data_collator` should be a simple callable (function, class with `__call__`).")
        if args.max_steps > 0:
            logger.info("max_steps is given, it will override any value given in num_train_epochs")
        # Datasets without a length can only be trained/evaluated with an explicit max_steps.
        if train_dataset is not None and not isinstance(train_dataset, collections.abc.Sized) and args.max_steps <= 0:
            raise ValueError("train_dataset does not implement __len__, max_steps has to be specified")
        if eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
            raise ValueError("eval_dataset must implement __len__")
        self._signature_columns = None
        # For `datasets.Dataset` inputs, drop the columns `model.forward` does not accept.
        if is_datasets_available():
            if isinstance(train_dataset, datasets.Dataset):
                self._remove_unused_columns(self.train_dataset, description="training")
            if isinstance(eval_dataset, datasets.Dataset):
                self._remove_unused_columns(self.eval_dataset, description="evaluation")
        # Mixed precision setup: pick the fp16 backend, then set up the grad scaler (amp) or apex.
        self.use_apex = False
        self.use_amp = False
        self.fp16_backend = None
        if args.fp16:
            if args.fp16_backend == "auto":
                # Prefer native AMP (torch >= 1.6), fall back to apex.
                self.fp16_backend = "amp" if _is_native_amp_available else "apex"
            else:
                self.fp16_backend = args.fp16_backend
            logger.info(f"Using {self.fp16_backend} fp16 backend")
        if args.fp16 and not args.deepspeed:  # amp/apex are not set up here under deepspeed
            if self.fp16_backend == "amp":
                self.use_amp = True
                self.scaler = ShardedGradScaler() if self.sharded_ddp is not None else torch.cuda.amp.GradScaler()
            else:
                if not is_apex_available():
                    raise ImportError(
                        "Using FP16 with APEX but APEX is not installed, please refer to https://www.github.com/nvidia/apex."
                    )
                self.use_apex = True
        # Label smoothing.
        if self.args.label_smoothing_factor != 0:
            self.label_smoother = LabelSmoother(epsilon=self.args.label_smoothing_factor)
        else:
            self.label_smoother = None
        self.state = TrainerState()
        self.control = TrainerControl()
        # Running flos count; pushed into `self.state` by `store_flos`.
        self._total_flos = None
        self.hp_search_backend = None
        self.use_tune_checkpoints = False
        default_label_names = (
            ["start_positions", "end_positions"]
            if type(self.model).__name__ in MODEL_FOR_QUESTION_ANSWERING_MAPPING_NAMES.values()
            else ["labels"]
        )
        self.label_names = default_label_names if self.args.label_names is None else self.args.label_names
        self.control = self.callback_handler.on_init_end(self.args, self.state, self.control)
        self._memory_tracker.stop_and_update_metrics()
    def add_callback(self, callback):
        """Add ``callback`` to the callbacks managed by the trainer (delegates to :obj:`self.callback_handler`)."""
        self.callback_handler.add_callback(callback)
    def pop_callback(self, callback):
        """
        Remove ``callback`` from the managed callbacks and return the handler's result (presumably the removed
        callback instance — see :obj:`CallbackHandler.pop_callback`).
        """
        return self.callback_handler.pop_callback(callback)
    def remove_callback(self, callback):
        """Remove ``callback`` from the managed callbacks (delegates to :obj:`self.callback_handler`)."""
        self.callback_handler.remove_callback(callback)
def _remove_unused_columns(self, dataset: "datasets.Dataset", description: Optional[str] = None):
if not self.args.remove_unused_columns:
return
if self._signature_columns is None:
signature = inspect.signature(self.model.forward)
self._signature_columns = list(signature.parameters.keys())
self._signature_columns += ["label", "label_ids"]
columns = [k for k in self._signature_columns if k in dataset.column_names]
ignored_columns = list(set(dataset.column_names) - set(self._signature_columns))
if len(ignored_columns) > 0:
dset_description = "" if description is None else f"in the {description} set "
logger.info(
f"The following columns {dset_description} don't have a corresponding argument in "
f"`{self.model.__class__.__name__}.forward` and have been ignored: {', '.join(ignored_columns)}."
)
dataset.set_format(type=dataset.format["type"], columns=columns, format_kwargs=dataset.format["format_kwargs"])
def _get_train_sampler(self) -> Optional[torch.utils.data.sampler.Sampler]:
    """Pick the sampler for the training dataloader.

    Returns ``None`` for iterable/unsized datasets. Otherwise builds a
    (distributed) length-grouped sampler when ``args.group_by_length`` is set,
    or a plain (distributed) random sampler.
    """
    if isinstance(self.train_dataset, torch.utils.data.IterableDataset) or not isinstance(
        self.train_dataset, collections.abc.Sized
    ):
        return None

    # Gather the number of processes and this process index.
    if self.args.parallel_mode == ParallelMode.TPU:
        num_replicas, rank = xm.xrt_world_size(), xm.get_ordinal()
    elif (
        self.args.parallel_mode == ParallelMode.DISTRIBUTED
        or self.args.parallel_mode == ParallelMode.SAGEMAKER_DISTRIBUTED
    ):
        num_replicas, rank = dist.get_world_size(), dist.get_rank()
    else:
        num_replicas, rank = 1, 0

    # Build the sampler.
    if self.args.group_by_length:
        model_input_name = self.tokenizer.model_input_names[0] if self.tokenizer is not None else None
        if num_replicas <= 1:
            return LengthGroupedSampler(
                self.train_dataset, self.args.train_batch_size, model_input_name=model_input_name
            )
        return DistributedLengthGroupedSampler(
            self.train_dataset,
            self.args.train_batch_size,
            num_replicas=num_replicas,
            rank=rank,
            model_input_name=model_input_name,
        )

    if num_replicas <= 1:
        return RandomSampler(self.train_dataset)
    return DistributedSampler(self.train_dataset, num_replicas=num_replicas, rank=rank)
def get_train_dataloader(self) -> DataLoader:
    """Build the training :class:`~torch.utils.data.DataLoader`.

    Raises:
        ValueError: if no training dataset was provided to the trainer.
    """
    if self.train_dataset is None:
        raise ValueError("Trainer: training requires a train_dataset.")
    sampler = self._get_train_sampler()
    loader_kwargs = dict(
        batch_size=self.args.train_batch_size,
        sampler=sampler,
        collate_fn=self.data_collator,
        drop_last=self.args.dataloader_drop_last,
        num_workers=self.args.dataloader_num_workers,
        pin_memory=self.args.dataloader_pin_memory,
    )
    return DataLoader(self.train_dataset, **loader_kwargs)
def _get_eval_sampler(self, eval_dataset: Dataset) -> Optional[torch.utils.data.sampler.Sampler]:
    """Pick a sequential sampler for evaluation (sharded across ranks when distributed)."""
    if is_torch_tpu_available():
        return SequentialDistributedSampler(eval_dataset, num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal())
    if self.args.local_rank != -1:
        return SequentialDistributedSampler(eval_dataset)
    return SequentialSampler(eval_dataset)
def get_eval_dataloader(self, eval_dataset: Optional[Dataset] = None) -> DataLoader:
    """Build the evaluation :class:`~torch.utils.data.DataLoader`.

    Args:
        eval_dataset: optional dataset overriding ``self.eval_dataset``. Must
            implement ``__len__``.

    Raises:
        ValueError: if no eval dataset is available, or the provided one has
            no ``__len__``.
    """
    if eval_dataset is None and self.eval_dataset is None:
        raise ValueError("Trainer: evaluation requires an eval_dataset.")
    elif eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
        raise ValueError("eval_dataset must implement __len__")
    # Resolve the dataset BEFORE column pruning so that the pruning is also
    # applied when falling back to ``self.eval_dataset`` (the previous elif
    # chain only pruned an explicitly passed dataset).
    eval_dataset = eval_dataset if eval_dataset is not None else self.eval_dataset
    if is_datasets_available() and isinstance(eval_dataset, datasets.Dataset):
        self._remove_unused_columns(eval_dataset, description="evaluation")
    eval_sampler = self._get_eval_sampler(eval_dataset)

    return DataLoader(
        eval_dataset,
        sampler=eval_sampler,
        batch_size=self.args.eval_batch_size,
        collate_fn=self.data_collator,
        drop_last=self.args.dataloader_drop_last,
        num_workers=self.args.dataloader_num_workers,
        pin_memory=self.args.dataloader_pin_memory,
    )
def get_test_dataloader(self, test_dataset: Dataset) -> DataLoader:
    """Build the prediction :class:`~torch.utils.data.DataLoader`.

    Args:
        test_dataset: the dataset to run prediction on; must implement ``__len__``.

    Raises:
        ValueError: if *test_dataset* has no ``__len__``.
    """
    if not isinstance(test_dataset, collections.abc.Sized):
        raise ValueError("test_dataset must implement __len__")
    elif is_datasets_available() and isinstance(test_dataset, datasets.Dataset):
        self._remove_unused_columns(test_dataset, description="test")
    test_sampler = self._get_eval_sampler(test_dataset)

    # We use the same batch_size as for eval.
    return DataLoader(
        test_dataset,
        sampler=test_sampler,
        batch_size=self.args.eval_batch_size,
        collate_fn=self.data_collator,
        drop_last=self.args.dataloader_drop_last,
        # Consistency fix: honor args.dataloader_num_workers like the
        # train/eval loaders do (it was silently ignored here).
        num_workers=self.args.dataloader_num_workers,
        pin_memory=self.args.dataloader_pin_memory,
    )
def create_optimizer_and_scheduler(self, num_training_steps: int):
    """Set up ``self.optimizer`` and ``self.lr_scheduler`` if not already created.

    Weight decay is applied to every parameter except biases and LayerNorm
    weights. The optimizer is Adafactor or AdamW depending on
    ``args.adafactor``, wrapped in fairscale's OSS under simple sharded DDP.

    Args:
        num_training_steps: total number of optimizer steps; sizes the LR
            schedule and the warmup when ``args.warmup_ratio`` is used.
    """
    if self.optimizer is None:
        no_decay = ["bias", "LayerNorm.weight"]
        optimizer_grouped_parameters = [
            {
                "params": [p for n, p in self.model.named_parameters() if not any(nd in n for nd in no_decay)],
                "weight_decay": self.args.weight_decay,
            },
            {
                "params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay)],
                "weight_decay": 0.0,
            },
        ]
        # NOTE: a redundant pre-assignment of `optimizer_cls` that was
        # immediately overwritten by this if/else has been removed.
        if self.args.adafactor:
            optimizer_cls = Adafactor
            optimizer_kwargs = {"scale_parameter": False, "relative_step": False}
        else:
            optimizer_cls = AdamW
            optimizer_kwargs = {
                "betas": (self.args.adam_beta1, self.args.adam_beta2),
                "eps": self.args.adam_epsilon,
            }
        optimizer_kwargs["lr"] = self.args.learning_rate
        if self.sharded_ddp == ShardedDDPOption.SIMPLE:
            # fairscale OSS shards optimizer state across data-parallel ranks.
            self.optimizer = OSS(
                params=optimizer_grouped_parameters,
                optim=optimizer_cls,
                **optimizer_kwargs,
            )
        else:
            self.optimizer = optimizer_cls(optimizer_grouped_parameters, **optimizer_kwargs)

    if self.lr_scheduler is None:
        warmup_steps = (
            self.args.warmup_steps
            if self.args.warmup_steps > 0
            else math.ceil(num_training_steps * self.args.warmup_ratio)
        )
        self.lr_scheduler = get_scheduler(
            self.args.lr_scheduler_type,
            self.optimizer,
            num_warmup_steps=warmup_steps,
            num_training_steps=num_training_steps,
        )
def num_examples(self, dataloader: DataLoader) -> int:
    """Return the number of examples backing *dataloader* (requires a sized dataset)."""
    dataset = dataloader.dataset
    return len(dataset)
def _hp_search_setup(self, trial: Union["optuna.Trial", Dict[str, Any]]):
    """Copy the hyper-parameters sampled for *trial* onto ``self.args``.

    No-op when no hyper-parameter search is running. Sampled values are cast
    to the type of the existing ``TrainingArguments`` field before assignment.

    Args:
        trial: an optuna trial, or (Ray backend) a dict of sampled values.

    Raises:
        AttributeError: if a sampled key has no matching `TrainingArguments` field.
    """
    self._trial = trial
    if self.hp_search_backend is None or trial is None:
        return
    params = self.hp_space(trial) if self.hp_search_backend == HPSearchBackend.OPTUNA else trial
    for key, value in params.items():
        if not hasattr(self.args, key):
            raise AttributeError(
                f"Trying to set {key} in the hyperparameter search but there is no corresponding field in `TrainingArguments`."
            )
        old_attr = getattr(self.args, key, None)
        # Casting value to the proper type
        if old_attr is not None:
            value = type(old_attr)(value)
        setattr(self.args, key, value)
    if self.hp_search_backend == HPSearchBackend.OPTUNA:
        # Bug fix: logging uses printf-style lazy formatting; the original
        # `logger.info("Trial:", trial.params)` had no placeholder, so the
        # params were silently dropped from the log output.
        logger.info("Trial: %s", trial.params)
def _report_to_hp_search(
    self, trial: Union["optuna.Trial", Dict[str, Any]], epoch: int, metrics: Dict[str, float]
):
    """Report the objective computed from *metrics* back to the HP-search backend.

    For optuna, reports the objective at *epoch* and may prune the trial
    (raises ``optuna.TrialPruned``). For Ray Tune, optionally saves a
    checkpoint first, then reports the objective alongside the raw metrics.
    No-op when no search is running.
    """
    if self.hp_search_backend is None or trial is None:
        return
    # metrics is copied because compute_objective may pop entries from it.
    self.objective = self.compute_objective(metrics.copy())
    if self.hp_search_backend == HPSearchBackend.OPTUNA:
        import optuna

        trial.report(self.objective, epoch)
        if trial.should_prune():
            raise optuna.TrialPruned()
    elif self.hp_search_backend == HPSearchBackend.RAY:
        from ray import tune

        if self.control.should_save:
            self._tune_save_checkpoint()
        tune.report(objective=self.objective, **metrics)
def _tune_save_checkpoint(self):
    """Save a trainer checkpoint inside a Ray Tune checkpoint directory (if enabled)."""
    from ray import tune

    if not self.use_tune_checkpoints:
        return
    with tune.checkpoint_dir(step=self.state.global_step) as ckpt_root:
        ckpt_path = os.path.join(ckpt_root, f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}")
        self.save_model(ckpt_path)
        if self.is_world_process_zero():
            self.state.save_to_json(os.path.join(ckpt_path, "trainer_state.json"))
            torch.save(self.optimizer.state_dict(), os.path.join(ckpt_path, "optimizer.pt"))
            torch.save(self.lr_scheduler.state_dict(), os.path.join(ckpt_path, "scheduler.pt"))
def call_model_init(self, trial=None):
    """Instantiate a fresh model via ``self.model_init``.

    ``model_init`` may take zero arguments or a single *trial* argument (for
    hyper-parameter search); anything else is rejected.

    Raises:
        RuntimeError: on an unsupported ``model_init`` signature or a ``None`` result.
    """
    init_params = inspect.signature(self.model_init).parameters
    if len(init_params) == 0:
        model = self.model_init()
    elif len(init_params) == 1:
        model = self.model_init(trial)
    else:
        raise RuntimeError("model_init should have 0 or 1 argument.")

    if model is None:
        raise RuntimeError("model_init should not return None.")

    return model
def _wrap_model(self, model, training=True):
    """Wrap *model* for the current hardware / parallelism configuration.

    Order is correctness-critical: Apex AMP initialization must precede
    DataParallel, and the distributed wrapper (ShardedDDP / FullyShardedDDP /
    DDP) comes last. With ``training=False`` only the AMP/DataParallel
    wrapping is applied. Returns the wrapped model (possibly the input
    unchanged, or ``self.deepspeed`` when DeepSpeed is active).
    """
    # already initialized its own DDP and AMP
    if self.deepspeed:
        return self.deepspeed

    # Mixed precision training with apex (torch < 1.6)
    if self.use_apex and training:
        model, self.optimizer = amp.initialize(model, self.optimizer, opt_level=self.args.fp16_opt_level)

    # Multi-gpu training (should be after apex fp16 initialization)
    if self.args.n_gpu > 1:
        model = torch.nn.DataParallel(model)

    # Note: in torch.distributed mode, there's no point in wrapping the model
    # for evaluation — return before the distributed wrappers below.
    if not training:
        return model

    # Distributed training (should be after apex fp16 initialization)
    if self.sharded_ddp is not None:
        # Sharded DDP!
        if self.sharded_ddp == ShardedDDPOption.SIMPLE:
            model = ShardedDDP(model, self.optimizer)
        else:
            mixed_precision = self.args.fp16
            cpu_offload = ShardedDDPOption.OFFLOAD in self.args.sharded_ddp
            zero_3 = self.sharded_ddp == ShardedDDPOption.ZERO_DP_3
            # XXX: Breaking the self.model convention but I see no way around it for now.
            self.model = model = FullyShardedDDP(
                model, mixed_precision=mixed_precision, reshard_after_forward=zero_3, cpu_offload=cpu_offload
            ).to(self.args.device)

    elif is_sagemaker_distributed_available():
        model = DDP(model, device_ids=[dist.get_local_rank()], broadcast_buffers=False)
    elif self.args.local_rank != -1:
        if self.args.ddp_find_unused_parameters is not None:
            find_unused_parameters = self.args.ddp_find_unused_parameters
        elif isinstance(model, PreTrainedModel):
            # find_unused_parameters breaks checkpointing as per
            # https://github.com/huggingface/transformers/pull/4659#issuecomment-643356021
            find_unused_parameters = not getattr(model.config, "gradient_checkpointing", False)
        else:
            find_unused_parameters = True
        model = torch.nn.parallel.DistributedDataParallel(
            model,
            device_ids=[self.args.local_rank],
            output_device=self.args.local_rank,
            find_unused_parameters=find_unused_parameters,
        )

    return model
def train(
    self,
    resume_from_checkpoint: Optional[Union[str, bool]] = None,
    trial: Union["optuna.Trial", Dict[str, Any]] = None,
    **kwargs,
):
    """Main training entry point.

    Args:
        resume_from_checkpoint: path to a checkpoint folder, or ``True`` to
            resume from the last checkpoint found in ``args.output_dir``.
        trial: the current trial when running a hyper-parameter search.
        kwargs: only the deprecated ``model_path`` alias is accepted.

    Returns:
        TrainOutput with the final global step, average training loss and
        the speed/flos metrics.
    """
    # memory metrics - must set up as early as possible
    self._memory_tracker.start()

    self.is_in_train = True

    # Backward compatibility: `model_path` was renamed `resume_from_checkpoint`.
    if "model_path" in kwargs:
        resume_from_checkpoint = kwargs.pop("model_path")
        warnings.warn(
            "`model_path` is deprecated and will be removed in a future version. Use `resume_from_checkpoint` "
            "instead.",
            FutureWarning,
        )
    if len(kwargs) > 0:
        raise TypeError(f"train() received got unexpected keyword arguments: {', '.join(list(kwargs.keys()))}.")
    # This might change the seed so needs to run first.
    self._hp_search_setup(trial)

    # Model re-init
    model_reloaded = False
    if self.model_init is not None:
        # Seed must be set before instantiating the model when using model_init.
        set_seed(self.args.seed)
        self.model = self.call_model_init(trial)
        model_reloaded = True
        # Reinitializes optimizer and scheduler
        self.optimizer, self.lr_scheduler = None, None

    # Load potential model checkpoint
    if isinstance(resume_from_checkpoint, bool) and resume_from_checkpoint:
        resume_from_checkpoint = get_last_checkpoint(self.args.output_dir)
        if resume_from_checkpoint is None:
            raise ValueError(f"No valid checkpoint found in output directory ({self.args.output_dir})")

    if resume_from_checkpoint is not None and os.path.isfile(os.path.join(resume_from_checkpoint, WEIGHTS_NAME)):
        logger.info(f"Loading model from {resume_from_checkpoint}).")
        if isinstance(self.model, PreTrainedModel):
            self.model = self.model.from_pretrained(resume_from_checkpoint)
            model_reloaded = True
        else:
            state_dict = torch.load(os.path.join(resume_from_checkpoint, WEIGHTS_NAME))
            self.model.load_state_dict(state_dict)

    # If model was re-initialized, put it on the right device and update self.model_wrapped
    if model_reloaded:
        if self.place_model_on_device:
            self.model = self.model.to(self.args.device)
        self.model_wrapped = self.model

    # Keeping track whether we can can len() on the dataset or not
    train_dataset_is_sized = isinstance(self.train_dataset, collections.abc.Sized)

    # Data loader and number of training steps
    train_dataloader = self.get_train_dataloader()

    # Setting up training control variables:
    # number of training epochs: num_train_epochs
    # number of training steps per epoch: num_update_steps_per_epoch
    # total number of training steps to execute: max_steps
    if train_dataset_is_sized:
        num_update_steps_per_epoch = len(train_dataloader) // self.args.gradient_accumulation_steps
        num_update_steps_per_epoch = max(num_update_steps_per_epoch, 1)
        if self.args.max_steps > 0:
            max_steps = self.args.max_steps
            num_train_epochs = self.args.max_steps // num_update_steps_per_epoch + int(
                self.args.max_steps % num_update_steps_per_epoch > 0
            )
        else:
            max_steps = math.ceil(self.args.num_train_epochs * num_update_steps_per_epoch)
            num_train_epochs = math.ceil(self.args.num_train_epochs)
    else:
        # see __init__. max_steps is set when the dataset has no __len__
        max_steps = self.args.max_steps
        num_train_epochs = 1
        num_update_steps_per_epoch = max_steps

    # Sharded DDP (other than SIMPLE) needs the wrapped model before the optimizer.
    delay_optimizer_creation = self.sharded_ddp is not None and self.sharded_ddp != ShardedDDPOption.SIMPLE
    if self.args.deepspeed:
        model, optimizer, lr_scheduler = init_deepspeed(self, num_training_steps=max_steps)
        self.model = model.module
        self.model_wrapped = model  # will get further wrapped in DDP
        self.deepspeed = model  # DeepSpeedEngine object
        self.optimizer = optimizer
        self.lr_scheduler = lr_scheduler
    elif not delay_optimizer_creation:
        self.create_optimizer_and_scheduler(num_training_steps=max_steps)

    self.state = TrainerState()
    self.state.is_hyper_param_search = trial is not None

    # Check if saved optimizer or scheduler states exist
    self._load_optimizer_and_scheduler(resume_from_checkpoint)

    model = self._wrap_model(self.model_wrapped)

    # for the rest of this function `model` is the outside model, whether it was wrapped or not
    if model is not self.model:
        self.model_wrapped = model

    if delay_optimizer_creation:
        self.create_optimizer_and_scheduler(num_training_steps=max_steps)

    # important: at this point:
    # self.model         is the Transformers Model
    # self.model_wrapped is DDP(Transformers Model), Deepspeed(Transformers Model), etc.

    # Train!
    if is_torch_tpu_available():
        world_size = xm.xrt_world_size()
    elif self.args.local_rank != -1:
        world_size = dist.get_world_size()
    else:
        world_size = 1

    total_train_batch_size = self.args.train_batch_size * self.args.gradient_accumulation_steps * world_size
    num_examples = (
        self.num_examples(train_dataloader)
        if train_dataset_is_sized
        else total_train_batch_size * self.args.max_steps
    )

    logger.info("***** Running training *****")
    logger.info(f"  Num examples = {num_examples}")
    logger.info(f"  Num Epochs = {num_train_epochs}")
    logger.info(f"  Instantaneous batch size per device = {self.args.per_device_train_batch_size}")
    logger.info(f"  Total train batch size (w. parallel, distributed & accumulation) = {total_train_batch_size}")
    logger.info(f"  Gradient Accumulation steps = {self.args.gradient_accumulation_steps}")
    logger.info(f"  Total optimization steps = {max_steps}")

    self.state.epoch = 0
    start_time = time.time()
    epochs_trained = 0
    steps_trained_in_current_epoch = 0

    # Check if continuing training from a checkpoint
    if resume_from_checkpoint is not None and os.path.isfile(
        os.path.join(resume_from_checkpoint, "trainer_state.json")
    ):
        self.state = TrainerState.load_from_json(os.path.join(resume_from_checkpoint, "trainer_state.json"))
        epochs_trained = self.state.global_step // num_update_steps_per_epoch
        if not self.args.ignore_data_skip:
            # Skip counting is in *batches*, hence the accumulation multiplier.
            steps_trained_in_current_epoch = self.state.global_step % (num_update_steps_per_epoch)
            steps_trained_in_current_epoch *= self.args.gradient_accumulation_steps
        else:
            steps_trained_in_current_epoch = 0

        logger.info("  Continuing training from checkpoint, will skip to saved global_step")
        logger.info(f"  Continuing training from epoch {epochs_trained}")
        logger.info(f"  Continuing training from global step {self.state.global_step}")
        if not self.args.ignore_data_skip:
            logger.info(
                f"  Will skip the first {epochs_trained} epochs then the first {steps_trained_in_current_epoch} "
                "batches in the first epoch."
            )

    # Update the references
    self.callback_handler.model = self.model
    self.callback_handler.optimizer = self.optimizer
    self.callback_handler.lr_scheduler = self.lr_scheduler
    self.callback_handler.train_dataloader = train_dataloader
    self.state.trial_name = self.hp_name(trial) if self.hp_name is not None else None
    self.state.trial_params = hp_params(trial) if trial is not None else None
    # This should be the same if the state has been saved but in case the training arguments changed, it's safer
    # to set these from the freshly computed values.
    self.state.max_steps = max_steps
    self.state.num_train_epochs = num_train_epochs
    self.state.is_local_process_zero = self.is_local_process_zero()
    self.state.is_world_process_zero = self.is_world_process_zero()

    # tr_loss stays a tensor to avoid device syncs from calling .item() per step.
    tr_loss = torch.tensor(0.0).to(self.args.device)
    self._total_loss_scalar = 0.0
    self._globalstep_last_logged = self.state.global_step
    self._total_flos = self.state.total_flos
    model.zero_grad()

    self.control = self.callback_handler.on_train_begin(self.args, self.state, self.control)

    # Replay the dataloader's random state for already-trained epochs.
    if not self.args.ignore_data_skip:
        for epoch in range(epochs_trained):
            # We just need to begin an iteration to create the randomization of the sampler.
            for _ in train_dataloader:
                break

    for epoch in range(epochs_trained, num_train_epochs):
        if isinstance(train_dataloader, DataLoader) and isinstance(train_dataloader.sampler, DistributedSampler):
            train_dataloader.sampler.set_epoch(epoch)

        if is_torch_tpu_available():
            parallel_loader = pl.ParallelLoader(train_dataloader, [self.args.device]).per_device_loader(
                self.args.device
            )
            epoch_iterator = parallel_loader
        else:
            epoch_iterator = train_dataloader

        # Reset the past mems state at the beginning of each epoch if necessary.
        if self.args.past_index >= 0:
            self._past = None

        steps_in_epoch = (
            len(epoch_iterator)
            if train_dataset_is_sized
            else self.args.max_steps * self.args.gradient_accumulation_steps
        )
        self.control = self.callback_handler.on_epoch_begin(self.args, self.state, self.control)

        for step, inputs in enumerate(epoch_iterator):

            # Skip past any already trained steps if resuming training
            if steps_trained_in_current_epoch > 0:
                steps_trained_in_current_epoch -= 1
                continue

            if (step + 1) % self.args.gradient_accumulation_steps == 0:
                self.control = self.callback_handler.on_step_begin(self.args, self.state, self.control)

            if (
                ((step + 1) % self.args.gradient_accumulation_steps != 0)
                and self.args.local_rank != -1
                and self.args._no_sync_in_gradient_accumulation
            ):
                # Avoid unnecessary DDP synchronization since there will be no optimizer step on this batch.
                with model.no_sync():
                    tr_loss += self.training_step(model, inputs)
            else:
                tr_loss += self.training_step(model, inputs)
            self._total_flos += float(self.floating_point_ops(inputs))

            # DeepSpeed's engine step must run on every micro-step.
            if self.deepspeed:
                self.deepspeed.step()

            if (step + 1) % self.args.gradient_accumulation_steps == 0 or (
                # last step in epoch but step is always smaller than gradient_accumulation_steps
                steps_in_epoch <= self.args.gradient_accumulation_steps
                and (step + 1) == steps_in_epoch
            ):
                # Gradient clipping (deepspeed clips internally).
                if self.args.max_grad_norm is not None and self.args.max_grad_norm > 0 and not self.deepspeed:
                    if self.use_amp:
                        # AMP: gradients need unscaling before clipping
                        self.scaler.unscale_(self.optimizer)

                    if hasattr(self.optimizer, "clip_grad_norm"):
                        # Some optimizers (like the sharded optimizer) have a specific way to do gradient clipping
                        self.optimizer.clip_grad_norm(self.args.max_grad_norm)
                    elif hasattr(model, "clip_grad_norm_"):
                        # Some models (like FullyShardedDDP) have a specific way to do gradient clipping
                        model.clip_grad_norm_(self.args.max_grad_norm)
                    else:
                        # Revert to normal clipping otherwise, handling Apex or full precision
                        torch.nn.utils.clip_grad_norm_(
                            amp.master_params(self.optimizer) if self.use_apex else model.parameters(),
                            self.args.max_grad_norm,
                        )

                # Optimizer step
                if self.deepspeed:
                    pass  # already stepped above via self.deepspeed.step()
                elif is_torch_tpu_available():
                    xm.optimizer_step(self.optimizer)
                elif self.use_amp:
                    self.scaler.step(self.optimizer)
                    self.scaler.update()
                else:
                    self.optimizer.step()

                if not self.deepspeed:
                    self.lr_scheduler.step()

                model.zero_grad()
                self.state.global_step += 1
                self.state.epoch = epoch + (step + 1) / steps_in_epoch
                self.control = self.callback_handler.on_step_end(self.args, self.state, self.control)

                self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)

            if self.control.should_epoch_stop or self.control.should_training_stop:
                break

        self.control = self.callback_handler.on_epoch_end(self.args, self.state, self.control)
        self._maybe_log_save_evaluate(tr_loss, model, trial, epoch)

        if self.args.tpu_metrics_debug or self.args.debug:
            if is_torch_tpu_available():
                # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
                xm.master_print(met.metrics_report())
            else:
                logger.warning(
                    "You enabled PyTorch/XLA debug metrics but you don't have a TPU "
                    "configured. Check your training configuration if this is unexpected."
                )
        if self.control.should_training_stop:
            break

    if self.args.past_index and hasattr(self, "_past"):
        # Clean the state at the end of training
        delattr(self, "_past")

    logger.info("\n\nTraining completed. Do not forget to share your model on huggingface.co/models =)\n\n")
    if self.args.load_best_model_at_end and self.state.best_model_checkpoint is not None:
        logger.info(
            f"Loading best model from {self.state.best_model_checkpoint} (score: {self.state.best_metric})."
        )
        if isinstance(self.model, PreTrainedModel):
            self.model = self.model.from_pretrained(self.state.best_model_checkpoint)
            if self.place_model_on_device:
                self.model = self.model.to(self.args.device)
        else:
            state_dict = torch.load(os.path.join(self.state.best_model_checkpoint, WEIGHTS_NAME))
            self.model.load_state_dict(state_dict)

        if self.deepspeed:
            self.deepspeed.load_checkpoint(
                self.state.best_model_checkpoint, load_optimizer_states=False, load_lr_scheduler_states=False
            )

    metrics = speed_metrics("train", start_time, self.state.max_steps)
    if self._total_flos is not None:
        self.store_flos()
        metrics["total_flos"] = self.state.total_flos
    self.log(metrics)

    self.control = self.callback_handler.on_train_end(self.args, self.state, self.control)
    # add remaining tr_loss
    self._total_loss_scalar += tr_loss.item()

    if self.deepspeed:
        # free up any memory that might be useful for eval
        self.deepspeed = None
        self.optimizer = None
        self.lr_scheduler = None
        self.model_wrapped = self.model
        gc.collect()  # force memory release
        # to restore normal behavior outside of train replay the place_model_on_device logic w/o deepspeed
        self.place_model_on_device = self.args.place_model_on_device
        if self.is_model_parallel:
            self.place_model_on_device = False

    self.is_in_train = False

    self._memory_tracker.stop_and_update_metrics(metrics)

    return TrainOutput(self.state.global_step, self._total_loss_scalar / self.state.global_step, metrics)
def _maybe_log_save_evaluate(self, tr_loss, model, trial, epoch):
    """Honor the control flags set by callbacks: log, evaluate and/or save.

    Args:
        tr_loss: running loss tensor; reset to zero IN PLACE after logging so
            the caller's accumulator restarts from the current step.
        model: the (possibly wrapped) model being trained, passed to checkpointing.
        trial: current HP-search trial (if any), for reporting eval metrics.
        epoch: current (fractional) epoch, reported to the HP search.
    """
    if self.control.should_log:
        logs: Dict[str, float] = {}
        tr_loss_scalar = tr_loss.item()
        # reset tr_loss to zero — in place, so the caller's tensor is reset too
        tr_loss -= tr_loss

        # Average the loss over the steps elapsed since the last log.
        logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
        logs["learning_rate"] = self._get_learning_rate()

        self._total_loss_scalar += tr_loss_scalar
        self._globalstep_last_logged = self.state.global_step

        self.log(logs)

    metrics = None
    if self.control.should_evaluate:
        metrics = self.evaluate()
        self._report_to_hp_search(trial, epoch, metrics)

    if self.control.should_save:
        self._save_checkpoint(model, trial, metrics=metrics)
        self.control = self.callback_handler.on_save(self.args, self.state, self.control)
def _save_checkpoint(self, model, trial, metrics=None):
    """Write a full training checkpoint: model, optimizer, scheduler and trainer state.

    Also updates the best-model bookkeeping from *metrics* and rotates old
    checkpoints. During an HP search each trial checkpoints under its own
    run directory.
    """
    # In all cases, including ddp/dp/deepspeed, self.model is always a reference to the model we
    # want to save except FullyShardedDDP.
    # assert unwrap_model(model) is self.model, "internal model should be a reference to self.model"

    # Save model checkpoint
    checkpoint_folder = f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}"

    if self.hp_search_backend is not None and trial is not None:
        if self.hp_search_backend == HPSearchBackend.OPTUNA:
            run_id = trial.number
        else:
            from ray import tune

            run_id = tune.get_trial_id()
        run_name = self.hp_name(trial) if self.hp_name is not None else f"run-{run_id}"
        run_dir = os.path.join(self.args.output_dir, run_name)
    else:
        run_dir = self.args.output_dir
        self.store_flos()

    output_dir = os.path.join(run_dir, checkpoint_folder)
    self.save_model(output_dir)
    if self.deepspeed:
        self.deepspeed.save_checkpoint(output_dir)

    # Save optimizer and scheduler
    if self.sharded_ddp == ShardedDDPOption.SIMPLE:
        # OSS keeps optimizer shards on each rank; consolidate onto rank 0 before saving.
        self.optimizer.consolidate_state_dict()

    if is_torch_tpu_available():
        xm.rendezvous("saving_optimizer_states")
        xm.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
        with warnings.catch_warnings(record=True) as caught_warnings:
            xm.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
            reissue_pt_warnings(caught_warnings)
    elif self.is_world_process_zero() and not self.deepspeed:
        # deepspeed.save_checkpoint above saves model/optim/sched
        torch.save(self.optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
        with warnings.catch_warnings(record=True) as caught_warnings:
            torch.save(self.lr_scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
        reissue_pt_warnings(caught_warnings)

    # Determine the new best metric / best model checkpoint
    if metrics is not None and self.args.metric_for_best_model is not None:
        metric_to_check = self.args.metric_for_best_model
        if not metric_to_check.startswith("eval_"):
            metric_to_check = f"eval_{metric_to_check}"
        metric_value = metrics[metric_to_check]

        operator = np.greater if self.args.greater_is_better else np.less
        if (
            self.state.best_metric is None
            or self.state.best_model_checkpoint is None
            or operator(metric_value, self.state.best_metric)
        ):
            self.state.best_metric = metric_value
            self.state.best_model_checkpoint = output_dir

    # Save the Trainer state
    if self.is_world_process_zero():
        self.state.save_to_json(os.path.join(output_dir, "trainer_state.json"))

    # Maybe delete some older checkpoints.
    if self.is_world_process_zero():
        self._rotate_checkpoints(use_mtime=True, output_dir=run_dir)
def _load_optimizer_and_scheduler(self, checkpoint):
    """Restore optimizer/scheduler states from *checkpoint* when present.

    On TPU the states are loaded on CPU first and then sent to the device.
    No-op when *checkpoint* is None or the state files are missing; DeepSpeed
    additionally attempts its own engine checkpoint load.
    """
    if checkpoint is None:
        return

    if os.path.isfile(os.path.join(checkpoint, "optimizer.pt")) and os.path.isfile(
        os.path.join(checkpoint, "scheduler.pt")
    ):
        # Load in optimizer and scheduler states
        if is_torch_tpu_available():
            # On TPU we have to take some extra precautions to properly load the states on the right device.
            optimizer_state = torch.load(os.path.join(checkpoint, "optimizer.pt"), map_location="cpu")
            with warnings.catch_warnings(record=True) as caught_warnings:
                lr_scheduler_state = torch.load(os.path.join(checkpoint, "scheduler.pt"), map_location="cpu")
            reissue_pt_warnings(caught_warnings)

            xm.send_cpu_data_to_device(optimizer_state, self.args.device)
            xm.send_cpu_data_to_device(lr_scheduler_state, self.args.device)
            self.optimizer.load_state_dict(optimizer_state)
            self.lr_scheduler.load_state_dict(lr_scheduler_state)
        else:
            self.optimizer.load_state_dict(
                torch.load(os.path.join(checkpoint, "optimizer.pt"), map_location=self.args.device)
            )
            with warnings.catch_warnings(record=True) as caught_warnings:
                self.lr_scheduler.load_state_dict(torch.load(os.path.join(checkpoint, "scheduler.pt")))
            reissue_pt_warnings(caught_warnings)

    if self.deepspeed:
        # Not sure how to check if there is a saved deepspeed checkpoint, but since it just return None if it fails to find a deepspeed checkpoint this is sort of a check-n-load function
        self.deepspeed.load_checkpoint(checkpoint, load_optimizer_states=True, load_lr_scheduler_states=True)
def hyperparameter_search(
    self,
    hp_space: Optional[Callable[["optuna.Trial"], Dict[str, float]]] = None,
    compute_objective: Optional[Callable[[Dict[str, float]], float]] = None,
    n_trials: int = 20,
    direction: str = "minimize",
    backend: Optional[Union["str", HPSearchBackend]] = None,
    hp_name: Optional[Callable[["optuna.Trial"], str]] = None,
    **kwargs,
) -> BestRun:
    """Launch a hyper-parameter search using optuna or Ray Tune.

    Requires the trainer to have been built with a ``model_init`` function so
    a fresh model can be instantiated for every trial.

    Returns:
        The :class:`BestRun` found by the chosen backend.

    Raises:
        RuntimeError: if no backend is installed, the chosen backend is
            missing, or no ``model_init`` was provided.
    """
    if backend is None:
        backend = default_hp_search_backend()
        if backend is None:
            raise RuntimeError(
                "At least one of optuna or ray should be installed. "
                "To install optuna run `pip install optuna`."
                "To install ray run `pip install ray[tune]`."
            )
    backend = HPSearchBackend(backend)
    if backend == HPSearchBackend.OPTUNA and not is_optuna_available():
        raise RuntimeError("You picked the optuna backend, but it is not installed. Use `pip install optuna`.")
    if backend == HPSearchBackend.RAY and not is_ray_tune_available():
        raise RuntimeError(
            "You picked the Ray Tune backend, but it is not installed. Use `pip install 'ray[tune]'`."
        )
    self.hp_search_backend = backend
    if self.model_init is None:
        raise RuntimeError(
            "To use hyperparameter search, you need to pass your model through a model_init function."
        )

    self.hp_space = hp_space if hp_space is not None else default_hp_space[backend]
    self.hp_name = hp_name
    self.compute_objective = compute_objective if compute_objective is not None else default_compute_objective

    search_fn = run_hp_search_optuna if backend == HPSearchBackend.OPTUNA else run_hp_search_ray
    best_run = search_fn(self, n_trials, direction, **kwargs)

    self.hp_search_backend = None
    return best_run
def log(self, logs: Dict[str, float]) -> None:
    """Record *logs* on the trainer state and notify the callbacks.

    Adds the current (rounded) epoch to *logs* when known, stores an entry
    with the global step in ``state.log_history``, then fires ``on_log``.
    """
    if self.state.epoch is not None:
        logs["epoch"] = round(self.state.epoch, 2)
    entry = dict(logs)
    entry["step"] = self.state.global_step
    self.state.log_history.append(entry)
    self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
def _prepare_inputs(self, inputs: Dict[str, Union[torch.Tensor, Any]]) -> Dict[str, Union[torch.Tensor, Any]]:
    """Move tensor inputs onto the training device (in place) and attach cached mems.

    Non-tensor values are left untouched. When ``args.past_index`` is active
    and a past state was cached, it is injected under the "mems" key.
    """
    for name, value in inputs.items():
        if isinstance(value, torch.Tensor):
            inputs[name] = value.to(self.args.device)

    if self.args.past_index >= 0 and self._past is not None:
        inputs["mems"] = self._past

    return inputs
def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor:
    """Run one forward/backward pass on a batch and return the detached loss.

    Handles loss averaging across GPUs, division by the gradient-accumulation
    factor, and the backward variant for native AMP, Apex or DeepSpeed. The
    optimizer step itself is the caller's responsibility.
    """
    model.train()
    inputs = self._prepare_inputs(inputs)

    if self.use_amp:
        with autocast():
            loss = self.compute_loss(model, inputs)
    else:
        loss = self.compute_loss(model, inputs)

    if self.args.n_gpu > 1:
        loss = loss.mean()  # mean() to average on multi-gpu parallel training

    if self.args.gradient_accumulation_steps > 1 and not self.deepspeed:
        # deepspeed handles loss scaling by gradient_accumulation_steps in its `backward`
        loss = loss / self.args.gradient_accumulation_steps

    if self.use_amp:
        self.scaler.scale(loss).backward()
    elif self.use_apex:
        with amp.scale_loss(loss, self.optimizer) as scaled_loss:
            scaled_loss.backward()
    elif self.deepspeed:
        # loss gets scaled under gradient_accumulation_steps in deepspeed
        loss = self.deepspeed.backward(loss)
    else:
        loss.backward()

    return loss.detach()
def compute_loss(self, model, inputs, return_outputs=False):
    """Run *model* on *inputs* and extract the training loss.

    When a label smoother is configured and "labels" are present, they are
    popped from *inputs* and the smoothed loss is computed from the outputs.
    Otherwise the model's own loss is used.

    Returns:
        The loss, or ``(loss, outputs)`` when *return_outputs* is true.
    """
    use_smoother = self.label_smoother is not None and "labels" in inputs
    labels = inputs.pop("labels") if use_smoother else None
    outputs = model(**inputs)
    # Save past state if it exists
    # TODO: this needs to be fixed and made cleaner later.
    if self.args.past_index >= 0:
        self._past = outputs[self.args.past_index]

    if labels is None:
        # We don't use .loss here since the model may return tuples instead of ModelOutput.
        loss = outputs["loss"] if isinstance(outputs, dict) else outputs[0]
    else:
        loss = self.label_smoother(outputs, labels)
    return (loss, outputs) if return_outputs else loss
def is_local_process_zero(self) -> bool:
    """Whether this process is the main process on its own machine (local rank 0)."""
    if is_torch_tpu_available():
        return xm.is_master_ordinal(local=True)
    return self.args.local_rank in [-1, 0]
def is_world_process_zero(self) -> bool:
    """Whether this process is the global main process (rank 0 across all machines)."""
    if is_torch_tpu_available():
        return xm.is_master_ordinal(local=False)
    return self.args.local_rank == -1 or dist.get_rank() == 0
def save_model(self, output_dir: Optional[str] = None):
    """Save the model so it can be reloaded with ``from_pretrained``.

    On TPU the TPU-specific save path is used. Otherwise only the world
    process zero writes, and on distributed runs a barrier keeps the other
    ranks in sync afterwards.
    """
    if is_torch_tpu_available():
        self._save_tpu(output_dir)
        return
    if self.is_world_process_zero():
        self._save(output_dir)
    if self.args.local_rank != -1:
        dist.barrier()
def _save_tpu(self, output_dir: Optional[str] = None):
    """Save model/tokenizer/training args from a TPU process.

    Uses ``xm.save`` and a rendezvous so every ordinal reaches the save point
    before weights are written; only the master ordinal creates the directory
    and stores the training arguments.
    """
    output_dir = output_dir if output_dir is not None else self.args.output_dir
    logger.info("Saving model checkpoint to %s", output_dir)

    if xm.is_master_ordinal():
        os.makedirs(output_dir, exist_ok=True)
        torch.save(self.args, os.path.join(output_dir, "training_args.bin"))

    # Save a trained model and configuration using `save_pretrained()`.
    # They can then be reloaded using `from_pretrained()`
    xm.rendezvous("saving_checkpoint")
    if not isinstance(self.model, PreTrainedModel):
        if isinstance(unwrap_model(self.model), PreTrainedModel):
            # The wrapper hides a PreTrainedModel: save through the unwrapped
            # model but with the wrapper's state dict.
            unwrap_model(self.model).save_pretrained(
                output_dir,
                save_config=self.is_world_process_zero(),
                state_dict=self.model.state_dict(),
                save_function=xm.save,
            )
        else:
            logger.info("Trainer.model is not a `PreTrainedModel`, only saving its state dict.")
            state_dict = self.model.state_dict()
            xm.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
    else:
        self.model.save_pretrained(output_dir, save_config=self.is_world_process_zero(), save_function=xm.save)
    if self.tokenizer is not None and self.is_world_process_zero():
        self.tokenizer.save_pretrained(output_dir)
def _save(self, output_dir: Optional[str] = None):
    """Write model weights, tokenizer files and training args to *output_dir*."""
    output_dir = output_dir if output_dir is not None else self.args.output_dir
    os.makedirs(output_dir, exist_ok=True)
    logger.info("Saving model checkpoint to %s", output_dir)
    # Save a trained model and configuration using `save_pretrained()`.
    # They can then be reloaded using `from_pretrained()`
    if isinstance(self.model, PreTrainedModel):
        self.model.save_pretrained(output_dir)
    elif isinstance(unwrap_model(self.model), PreTrainedModel):
        # The wrapper hides a PreTrainedModel: save through the unwrapped
        # model but with the wrapper's state dict.
        unwrap_model(self.model).save_pretrained(output_dir, state_dict=self.model.state_dict())
    else:
        logger.info("Trainer.model is not a `PreTrainedModel`, only saving its state dict.")
        state_dict = self.model.state_dict()
        torch.save(state_dict, os.path.join(output_dir, WEIGHTS_NAME))
    if self.tokenizer is not None:
        self.tokenizer.save_pretrained(output_dir)

    # Good practice: save your training arguments together with the trained model
    torch.save(self.args, os.path.join(output_dir, "training_args.bin"))
def store_flos(self):
# Storing the number of floating-point operations that went into the model
if self._total_flos is not None:
if self.args.local_rank != -1:
self.state.total_flos = distributed_broadcast_scalars([self._total_flos]).sum().item()
else:
self.state.total_flos = self._total_flos
def _sorted_checkpoints(
self, output_dir=None, checkpoint_prefix=PREFIX_CHECKPOINT_DIR, use_mtime=False
) -> List[str]:
ordering_and_checkpoint_path = []
glob_checkpoints = [str(x) for x in Path(output_dir).glob(f"{checkpoint_prefix}-*")]
for path in glob_checkpoints:
if use_mtime:
ordering_and_checkpoint_path.append((os.path.getmtime(path), path))
else:
regex_match = re.match(f".*{checkpoint_prefix}-([0-9]+)", path)
if regex_match and regex_match.groups():
ordering_and_checkpoint_path.append((int(regex_match.groups()[0]), path))
checkpoints_sorted = sorted(ordering_and_checkpoint_path)
checkpoints_sorted = [checkpoint[1] for checkpoint in checkpoints_sorted]
# Make sure we don't delete the best model.
if self.state.best_model_checkpoint is not None:
best_model_index = checkpoints_sorted.index(str(Path(self.state.best_model_checkpoint)))
checkpoints_sorted[best_model_index], checkpoints_sorted[-1] = (
checkpoints_sorted[-1],
checkpoints_sorted[best_model_index],
)
return checkpoints_sorted
def _rotate_checkpoints(self, use_mtime=False, output_dir=None) -> None:
if self.args.save_total_limit is None or self.args.save_total_limit <= 0:
return
checkpoints_sorted = self._sorted_checkpoints(use_mtime=use_mtime, output_dir=output_dir)
if len(checkpoints_sorted) <= self.args.save_total_limit:
return
number_of_checkpoints_to_delete = max(0, len(checkpoints_sorted) - self.args.save_total_limit)
checkpoints_to_be_deleted = checkpoints_sorted[:number_of_checkpoints_to_delete]
for checkpoint in checkpoints_to_be_deleted:
logger.info("Deleting older checkpoint [{}] due to args.save_total_limit".format(checkpoint))
shutil.rmtree(checkpoint)
    def evaluate(
        self,
        eval_dataset: Optional[Dataset] = None,
        ignore_keys: Optional[List[str]] = None,
        metric_key_prefix: str = "eval",
    ) -> Dict[str, float]:
        """Run evaluation and return a dict of metrics prefixed with ``metric_key_prefix``.

        Falls back to ``self.eval_dataset`` when ``eval_dataset`` is None.
        Raises ValueError if the dataset has no ``__len__`` (the prediction
        loop needs a length to pre-allocate gatherers).
        """
        # memory metrics - must set up as early as possible
        self._memory_tracker.start()
        if eval_dataset is not None and not isinstance(eval_dataset, collections.abc.Sized):
            raise ValueError("eval_dataset must implement __len__")
        eval_dataloader = self.get_eval_dataloader(eval_dataset)
        start_time = time.time()
        output = self.prediction_loop(
            eval_dataloader,
            description="Evaluation",
            # No point gathering the predictions if there are no metrics;
            # passing None lets the loop fall back to args.prediction_loss_only.
            prediction_loss_only=True if self.compute_metrics is None else None,
            ignore_keys=ignore_keys,
            metric_key_prefix=metric_key_prefix,
        )
        n_samples = len(eval_dataset if eval_dataset is not None else self.eval_dataset)
        output.metrics.update(speed_metrics(metric_key_prefix, start_time, n_samples))
        self.log(output.metrics)
        if self.args.tpu_metrics_debug or self.args.debug:
            # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
            xm.master_print(met.metrics_report())
        self.control = self.callback_handler.on_evaluate(self.args, self.state, self.control, output.metrics)
        self._memory_tracker.stop_and_update_metrics(output.metrics)
        return output.metrics
def predict(
self, test_dataset: Dataset, ignore_keys: Optional[List[str]] = None, metric_key_prefix: str = "eval"
) -> PredictionOutput:
self._memory_tracker.start()
if test_dataset is not None and not isinstance(test_dataset, collections.abc.Sized):
raise ValueError("test_dataset must implement __len__")
test_dataloader = self.get_test_dataloader(test_dataset)
start_time = time.time()
output = self.prediction_loop(
test_dataloader, description="Prediction", ignore_keys=ignore_keys, metric_key_prefix=metric_key_prefix
)
output.metrics.update(speed_metrics(metric_key_prefix, start_time, len(test_dataset)))
self._memory_tracker.stop_and_update_metrics(output.metrics)
return output
    def prediction_loop(
        self,
        dataloader: DataLoader,
        description: str,
        prediction_loss_only: Optional[bool] = None,
        ignore_keys: Optional[List[str]] = None,
        metric_key_prefix: str = "eval",
    ) -> PredictionOutput:
        """Shared evaluation/prediction loop used by ``evaluate`` and ``predict``.

        Iterates ``dataloader``, accumulates per-step losses/logits/labels on
        device, periodically gathers them across processes onto the CPU (every
        ``args.eval_accumulation_steps`` steps), and finally computes metrics.
        Returns a :class:`PredictionOutput` with predictions, label_ids and
        metrics (all keys prefixed with ``metric_key_prefix``).
        """
        if not isinstance(dataloader.dataset, collections.abc.Sized):
            raise ValueError("dataset must implement __len__")
        prediction_loss_only = (
            prediction_loss_only if prediction_loss_only is not None else self.args.prediction_loss_only
        )
        if self.args.deepspeed and not self.args.do_train:
            # Deepspeed is only wired up for training in this version.
            logger.info("Detected the deepspeed argument but it will not be used for evaluation")
        model = self._wrap_model(self.model, training=False)
        # ``train`` is running, half it first and then put on device
        if not self.is_in_train and self.args.fp16_full_eval:
            model = model.half().to(self.args.device)
        batch_size = dataloader.batch_size
        num_examples = self.num_examples(dataloader)
        logger.info("***** Running %s *****", description)
        logger.info("  Num examples = %d", num_examples)
        logger.info("  Batch size = %d", batch_size)
        # On-device accumulators for the current accumulation window.
        losses_host: torch.Tensor = None
        preds_host: Union[torch.Tensor, List[torch.Tensor]] = None
        labels_host: Union[torch.Tensor, List[torch.Tensor]] = None
        world_size = 1
        if is_torch_tpu_available():
            world_size = xm.xrt_world_size()
        elif self.args.local_rank != -1:
            world_size = dist.get_world_size()
        world_size = max(1, world_size)
        # Losses are repeated per-sample, so pad to a multiple of batch_size.
        eval_losses_gatherer = DistributedTensorGatherer(world_size, num_examples, make_multiple_of=batch_size)
        if not prediction_loss_only:
            preds_gatherer = DistributedTensorGatherer(world_size, num_examples)
            labels_gatherer = DistributedTensorGatherer(world_size, num_examples)
        model.eval()
        if is_torch_tpu_available():
            dataloader = pl.ParallelLoader(dataloader, [self.args.device]).per_device_loader(self.args.device)
        if self.args.past_index >= 0:
            # Reset cached past state (e.g. transformer-XL mems) before the loop.
            self._past = None
        self.callback_handler.eval_dataloader = dataloader
        for step, inputs in enumerate(dataloader):
            loss, logits, labels = self.prediction_step(model, inputs, prediction_loss_only, ignore_keys=ignore_keys)
            if loss is not None:
                # Repeat the scalar loss per sample so the gatherer can align it.
                losses = loss.repeat(batch_size)
                losses_host = losses if losses_host is None else torch.cat((losses_host, losses), dim=0)
            if logits is not None:
                preds_host = logits if preds_host is None else nested_concat(preds_host, logits, padding_index=-100)
            if labels is not None:
                labels_host = labels if labels_host is None else nested_concat(labels_host, labels, padding_index=-100)
            self.control = self.callback_handler.on_prediction_step(self.args, self.state, self.control)
            # Gather all tensors and put them back on the CPU if we have done enough accumulation steps.
            if self.args.eval_accumulation_steps is not None and (step + 1) % self.args.eval_accumulation_steps == 0:
                eval_losses_gatherer.add_arrays(self._gather_and_numpify(losses_host, "eval_losses"))
                if not prediction_loss_only:
                    preds_gatherer.add_arrays(self._gather_and_numpify(preds_host, "eval_preds"))
                    labels_gatherer.add_arrays(self._gather_and_numpify(labels_host, "eval_label_ids"))
                # Set back to None to begin a new accumulation
                losses_host, preds_host, labels_host = None, None, None
        if self.args.past_index and hasattr(self, "_past"):
            # Clean the state at the end of the evaluation loop
            delattr(self, "_past")
        # Gather all remaining tensors and put them back on the CPU
        eval_losses_gatherer.add_arrays(self._gather_and_numpify(losses_host, "eval_losses"))
        if not prediction_loss_only:
            preds_gatherer.add_arrays(self._gather_and_numpify(preds_host, "eval_preds"))
            labels_gatherer.add_arrays(self._gather_and_numpify(labels_host, "eval_label_ids"))
        eval_loss = eval_losses_gatherer.finalize()
        preds = preds_gatherer.finalize() if not prediction_loss_only else None
        label_ids = labels_gatherer.finalize() if not prediction_loss_only else None
        if self.compute_metrics is not None and preds is not None and label_ids is not None:
            metrics = self.compute_metrics(EvalPrediction(predictions=preds, label_ids=label_ids))
        else:
            metrics = {}
        if eval_loss is not None:
            metrics[f"{metric_key_prefix}_loss"] = eval_loss.mean().item()
        # Prefix all keys with metric_key_prefix + '_'
        for key in list(metrics.keys()):
            if not key.startswith(f"{metric_key_prefix}_"):
                metrics[f"{metric_key_prefix}_{key}"] = metrics.pop(key)
        return PredictionOutput(predictions=preds, label_ids=label_ids, metrics=metrics)
def _gather_and_numpify(self, tensors, name):
if tensors is None:
return
if is_torch_tpu_available():
tensors = nested_xla_mesh_reduce(tensors, name)
elif self.args.local_rank != -1:
tensors = distributed_concat(tensors)
return nested_numpify(tensors)
    def prediction_step(
        self,
        model: nn.Module,
        inputs: Dict[str, Union[torch.Tensor, Any]],
        prediction_loss_only: bool,
        ignore_keys: Optional[List[str]] = None,
    ) -> Tuple[Optional[float], Optional[torch.Tensor], Optional[torch.Tensor]]:
        """Perform one no-grad forward pass and return ``(loss, logits, labels)``.

        ``loss`` and ``labels`` are None when the batch carries no labels;
        ``logits`` and ``labels`` are None when ``prediction_loss_only`` is set.
        Output keys listed in ``ignore_keys`` are dropped from the logits.
        """
        has_labels = all(inputs.get(k) is not None for k in self.label_names)
        inputs = self._prepare_inputs(inputs)
        if ignore_keys is None:
            if hasattr(self.model, "config"):
                ignore_keys = getattr(self.model.config, "keys_to_ignore_at_inference", [])
            else:
                ignore_keys = []
        # labels may be popped when computing the loss (label smoothing for instance) so we grab them first.
        if has_labels:
            labels = nested_detach(tuple(inputs.get(name) for name in self.label_names))
            if len(labels) == 1:
                labels = labels[0]
        else:
            labels = None
        with torch.no_grad():
            if has_labels:
                loss, outputs = self.compute_loss(model, inputs, return_outputs=True)
                loss = loss.mean().detach()
                if isinstance(outputs, dict):
                    # Drop ignored keys and the loss itself from the logits tuple.
                    logits = tuple(v for k, v in outputs.items() if k not in ignore_keys + ["loss"])
                else:
                    logits = outputs[1:]
            else:
                loss = None
                if self.use_amp:
                    with autocast():
                        outputs = model(**inputs)
                else:
                    outputs = model(**inputs)
                if isinstance(outputs, dict):
                    logits = tuple(v for k, v in outputs.items() if k not in ignore_keys)
                else:
                    logits = outputs
                # TODO: this needs to be fixed and made cleaner later.
                if self.args.past_index >= 0:
                    self._past = outputs[self.args.past_index - 1]
        if prediction_loss_only:
            return (loss, None, None)
        logits = nested_detach(logits)
        if len(logits) == 1:
            logits = logits[0]
        return (loss, logits, labels)
def floating_point_ops(self, inputs: Dict[str, Union[torch.Tensor, Any]]):
if hasattr(self.model, "floating_point_ops"):
return self.model.floating_point_ops(inputs)
else:
return 0
| true | true |
f7fe0a9f0087348944231663b7374366e13bbd29 | 13,719 | py | Python | pyspotter/pyspotter.py | samapriya/pyspotter | d9400a3ed2d60a7c7facdb8cae76f532b675415d | [
"MIT"
] | 2 | 2021-12-27T12:55:34.000Z | 2022-02-13T13:54:18.000Z | pyspotter/pyspotter.py | samapriya/pyspotter | d9400a3ed2d60a7c7facdb8cae76f532b675415d | [
"MIT"
] | null | null | null | pyspotter/pyspotter.py | samapriya/pyspotter | d9400a3ed2d60a7c7facdb8cae76f532b675415d | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
__copyright__ = """
MIT License
Copyright (c) 2021 Samapriya Roy
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__license__ = "MIT License"
import requests
import json
import sys
import pkg_resources
import argparse
import time
import csv
import getpass
import os
import pytz
from itertools import groupby
from dateutil import parser
from os.path import expanduser
from bs4 import BeautifulSoup
from timezonefinder import TimezoneFinder
class Solution:
    """Dotted version-string comparator (LeetCode-style helper)."""

    def compareVersion(self, version1, version2):
        """Return 1 if version1 > version2, -1 if smaller, 0 if equal.

        Segments are compared numerically; missing segments count as 0,
        so "1.0" == "1.0.0".
        """
        left = [int(part) for part in version1.split(".")]
        right = [int(part) for part in version2.split(".")]
        for idx in range(max(len(left), len(right))):
            a = left[idx] if idx < len(left) else 0
            b = right[idx] if idx < len(right) else 0
            if a != b:
                return 1 if a > b else -1
        return 0


# Shared comparator instance used by the version check below.
ob1 = Solution()
# Get package version
def pyspotter_version():
    """Warn when the installed pyspotter version differs from the PyPI release.

    Scrapes the PyPI project page and compares the version in the first <h1>
    against the locally installed distribution. Network-dependent and fragile:
    assumes the PyPI page keeps "name version" in its first heading — if the
    page layout changes this breaks. Prints a banner; returns nothing.
    """
    url = "https://pypi.org/project/pyspotter/"
    source = requests.get(url)
    html_content = source.text
    soup = BeautifulSoup(html_content, "html.parser")
    # The first <h1> on a PyPI project page is "package-name X.Y.Z".
    company = soup.find("h1")
    vcheck = ob1.compareVersion(
        company.string.strip().split(" ")[-1],
        pkg_resources.get_distribution("pyspotter").version,
    )
    if vcheck == 1:
        # PyPI is newer than the installed package: suggest an upgrade.
        print(
            "\n"
            + "========================================================================="
        )
        print(
            "Current version of pyspotter is {} upgrade to lastest version: {}".format(
                pkg_resources.get_distribution("pyspotter").version,
                company.string.strip().split(" ")[-1],
            )
        )
        print(
            "========================================================================="
        )
    elif vcheck == -1:
        # Installed package is ahead of PyPI: probably a dev/staging checkout.
        print(
            "\n"
            + "========================================================================="
        )
        print(
            "Possibly running staging code {} compared to pypi release {}".format(
                pkg_resources.get_distribution("pyspotter").version,
                company.string.strip().split(" ")[-1],
            )
        )
        print(
            "========================================================================="
        )


# NOTE: runs at import time, so every CLI invocation performs a network call.
pyspotter_version()
# set credentials
def auth(usr):
    """Log in to the Sofar Ocean API and cache the returned token.

    Prompts for the email when ``usr`` is None and always prompts for the
    password. On success writes ``{"token": ...}`` to ``~/sofarocean.json``;
    on failure prints the HTTP status and writes nothing.
    """
    # Browser-like headers expected by the weather.sofarocean.com login flow.
    headers = {
        "authority": "api.sofarocean.com",
        "sec-ch-ua": '" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"',
        "accept": "application/json, text/plain, */*",
        "sec-ch-ua-mobile": "?0",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "content-type": "application/x-www-form-urlencoded",
        "origin": "https://weather.sofarocean.com",
        "sec-fetch-site": "same-site",
        "sec-fetch-mode": "cors",
        "sec-fetch-dest": "empty",
        "referer": "https://weather.sofarocean.com/",
        "accept-language": "en-US,en;q=0.9",
    }
    if usr is None:
        usr = input("Enter email: ")
    pwd = getpass.getpass("Enter password: ")
    payload = {"username": usr, "password": pwd, "skipRedirect": "true"}
    response = requests.post(
        "https://api.sofarocean.com/login/", headers=headers, data=payload
    )
    if response.status_code != 200:
        print(f"Authentication failed with error {response.status_code}")
        return
    print("Authentication successful")
    token_record = {"token": response.json()["token"]}
    with open(expanduser("~/sofarocean.json"), "w") as outfile:
        json.dump(token_record, outfile)


def auth_from_parser(args):
    """argparse bridge for the ``auth`` subcommand."""
    auth(usr=args.username)
def reset():
    """Regenerate the API token for the given account.

    Reads the cached token from ``~/sofarocean.json`` (authenticating first
    if the file does not exist), requests a fresh token, and rewrites the
    cache on success.
    """
    home = expanduser("~/sofarocean.json")
    usr = input("Enter email: ")
    if not os.path.exists(home):
        # No cached token yet: authenticate so the cache file gets written.
        auth(usr)
    if not os.path.exists(home):
        # Fix: auth() can fail (bad credentials / network) without writing the
        # file; the original code then crashed with an unhandled
        # FileNotFoundError. Exit with a message instead.
        sys.exit("Authentication did not produce a token file; cannot reset")
    with open(home) as json_file:
        token = json.load(json_file).get("token")
    headers = {
        "token": token,
    }
    response = requests.post(
        f"https://api.sofarocean.com/users/{usr}/tokens/", headers=headers
    )
    if response.status_code == 200:
        print("Token reset successful")
        with open(home, "w") as outfile:
            json.dump({"token": response.json()["token"]}, outfile)
    else:
        print("Token reset failed")


def reset_from_parser(args):
    """argparse bridge for the ``reset`` subcommand."""
    reset()
def tokenize():
    """Return the cached API token, authenticating first if no cache exists.

    The token lives in ``~/sofarocean.json`` as ``{"token": ...}``.
    """
    home = expanduser("~/sofarocean.json")
    if not os.path.exists(home):
        # First run: prompt for credentials and write the cache file.
        auth(usr=None)
    # Both branches of the original read the same file the same way, so a
    # single read after the optional auth() is behaviorally identical.
    with open(home) as json_file:
        return json.load(json_file).get("token")
def devlist():
    """Print the Spotter device IDs available to the authenticated account."""
    headers = {
        "token": tokenize(),
    }
    response = requests.get("https://api.sofarocean.com/api/devices", headers=headers)
    # Fix: the original called response.json()['message'] unconditionally, so an
    # expired token (401) crashed with an opaque KeyError instead of a message.
    if response.status_code != 200:
        sys.exit(f"Device listing failed with error {response.status_code}")
    payload = response.json()
    print(f"Total of {payload['message']}" + "\n")
    for device in payload["data"]["devices"]:
        print(device["spotterId"])


def devlist_from_parser(args):
    """argparse bridge for the ``devlist`` subcommand."""
    devlist()
def spot_check(spot_id):
    """Print the latest readings and local time for one Spotter buoy.

    ``spot_id`` may be given with or without the "SPOT-" prefix. Prints a
    JSON summary (scalar fields plus last position and timestamps) on
    success, or an error message on a non-200 response.
    """
    if not spot_id.startswith("SPOT-"):
        spot_id = f"SPOT-{spot_id}"
    dic = {}
    obj = TimezoneFinder()
    headers = {
        "token": tokenize(),
    }
    response = requests.get(
        f"https://api.sofarocean.com/api/latest-data?spotterId={spot_id}",
        headers=headers,
    )
    if response.status_code == 200:
        spotter = response.json()
        print(f"Fetching info for Spotter {spot_id}" + "\n")
        # Copy scalar fields only; the bulky series go through `waves` below.
        for key, value in spotter["data"].items():
            if key != "frequencyData" and key != "track" and key != "waves":
                dic[key] = value
            # print(key,value)
        # NOTE(review): assumes at least one entry in data["waves"]; an empty
        # list would raise IndexError here — TODO confirm API guarantees.
        latitude = spotter["data"]["waves"][-1]["latitude"]
        longitude = spotter["data"]["waves"][-1]["longitude"]
        # NOTE(review): timezone_at can return None over open ocean, which
        # would make pytz.timezone raise — worth guarding.
        time_zone = obj.timezone_at(lat=float(latitude), lng=float(longitude))
        tz = pytz.timezone(time_zone)
        now_utc = parser.parse(spotter["data"]["waves"][-1]["timestamp"])
        now_kl = now_utc.replace(tzinfo=pytz.utc).astimezone(tz)
        dic["last updated (UTC time)"] = str(now_utc)
        dic["last updated (spotter local time)"] = str(now_kl)
        dic["latitude"] = spotter["data"]["waves"][-1]["latitude"]
        dic["longitude"] = spotter["data"]["waves"][-1]["longitude"]
        print(json.dumps(dic, indent=2, sort_keys=False))
    else:
        print(
            f"Spot check failed with error code {response.status_code}: {response.json()['message']}"
        )


def spotcheck_from_parser(args):
    """argparse bridge for the ``spot-check`` subcommand."""
    spot_check(spot_id=args.sid)
def spot_data(spot_id, dtype, folder):
    """Export Spotter readings of one data type to per-day CSV files.

    Args:
        spot_id: Spotter ID, with or without the "SPOT-" prefix.
        dtype: One of "wave", "wind" or "sst".
        folder: Existing directory that receives one CSV per UTC day, named
            ``{spot_id}_{date}_{dtype}.csv``.

    Exits with a message on an unknown dtype, an API error, or when the
    requested data type has no readings.
    """
    # Column layout per data type; doubles as dtype validation. The original
    # fell through to a NameError on an unrecognized dtype.
    csv_layouts = {
        "wave": [
            "significantWaveHeight",
            "peakPeriod",
            "meanPeriod",
            "peakDirection",
            "peakDirectionalSpread",
            "meanDirection",
            "meanDirectionalSpread",
            "timestamp",
            "latitude",
            "longitude",
            "date",
            "spotter_id",
        ],
        "wind": [
            "speed",
            "direction",
            "seasurfaceId",
            "latitude",
            "longitude",
            "timestamp",
            "date",
            "spotter_id",
        ],
        "sst": [
            "degrees",
            "latitude",
            "longitude",
            "timestamp",
            "date",
            "spotter_id",
        ],
    }
    if dtype not in csv_layouts:
        sys.exit(f"Unknown dtype {dtype}: expected one of wave/wind/sst")
    if not spot_id.startswith("SPOT-"):
        spot_id = f"SPOT-{spot_id}"
    params = {
        "spotterId": [spot_id],
        "includeSurfaceTempData": True,
        "includeWindData": True,
    }
    headers = {
        "token": tokenize(),
    }
    response = requests.get(
        "https://api.sofarocean.com/api/wave-data", headers=headers, params=params
    )
    if response.status_code != 200:
        sys.exit(
            f"Failed with status_code: {response.status_code}: {response.json()['message']}"
        )
    spotter = response.json()
    print("\n" + f"Fetching info for Spotter {spot_id}" + "\n")
    # API section name differs from the CLI dtype for waves and sst.
    section = {"wave": "waves", "wind": "wind", "sst": "surfaceTemp"}[dtype]
    # Fix: the original tested `not key in data or len(...) == 0 and dtype == X`
    # — `and` binds tighter than `or`, so a *missing* section aborted the run
    # (or raised KeyError in the else branch) even when the user asked for a
    # different dtype. Only the requested section matters here.
    readings_list = spotter["data"].get(section) or []
    if not readings_list:
        sys.exit(f"No {section} data found")
    # Tag each reading with its UTC date and the spotter it came from.
    for readings in readings_list:
        readings["date"] = readings["timestamp"].split("T")[0]
        readings["spotter_id"] = spot_id

    def key_func(k):
        # Group key for the per-day CSV split.
        return k["date"]

    # groupby requires input sorted by the same key.
    for day, group in groupby(sorted(readings_list, key=key_func), key_func):
        print(f"Processing {spot_id}_{day}_{dtype}.csv")
        dict_data = list(group)
        try:
            with open(
                os.path.join(folder, f"{spot_id}_{day}_{dtype}.csv"), "w"
            ) as csvfile:
                writer = csv.DictWriter(
                    csvfile, fieldnames=csv_layouts[dtype], delimiter=",", lineterminator="\n"
                )
                writer.writeheader()
                for data in dict_data:
                    writer.writerow(data)
        except IOError:
            print("I/O error")


def spot_data_from_parser(args):
    """argparse bridge for the ``spot-data`` subcommand."""
    spot_data(spot_id=args.sid, dtype=args.dtype, folder=args.folder)
def main(args=None):
    """CLI entry point: wire up argparse subcommands and dispatch.

    Subcommands: auth, reset, devlist, spot-check, spot-data. Each subparser
    stores its handler in ``func`` via set_defaults; invoking with no
    subcommand triggers the "too few arguments" error below.
    """
    # NOTE: this local `parser` shadows the module-level `dateutil.parser`
    # import inside this function only.
    parser = argparse.ArgumentParser(description="Simple CLI for Sofarocean API")
    subparsers = parser.add_subparsers()
    parser_auth = subparsers.add_parser(
        "auth", help="Authenticates and saves your API token"
    )
    optional_named = parser_auth.add_argument_group("Optional named arguments")
    optional_named.add_argument("--username", help="Username", default=None)
    parser_auth.set_defaults(func=auth_from_parser)
    parser_reset = subparsers.add_parser("reset", help="Regenerates your API token")
    parser_reset.set_defaults(func=reset_from_parser)
    parser_devlist = subparsers.add_parser(
        "devlist", help="Print lists of devices available under your account"
    )
    parser_devlist.set_defaults(func=devlist_from_parser)
    parser_spotcheck = subparsers.add_parser(
        "spot-check", help="Spot check a Spotter location and time"
    )
    required_named = parser_spotcheck.add_argument_group("Required named arguments.")
    required_named.add_argument("--sid", help="Spotter ID", required=True)
    parser_spotcheck.set_defaults(func=spotcheck_from_parser)
    parser_spot_data = subparsers.add_parser(
        "spot-data", help="Export Spotter Data based on Spotter ID & grouped by date"
    )
    required_named = parser_spot_data.add_argument_group("Required named arguments.")
    required_named.add_argument("--sid", help="Spotter ID", required=True)
    required_named.add_argument(
        "--dtype", help="Data type: wind/wave/sst", required=True
    )
    required_named.add_argument(
        "--folder", help="Folder to export CSV data", required=True
    )
    parser_spot_data.set_defaults(func=spot_data_from_parser)
    args = parser.parse_args()
    try:
        # `func` is only set when a subcommand was chosen.
        func = args.func
    except AttributeError:
        parser.error("too few arguments")
    func(args)


if __name__ == "__main__":
    main()
| 32.586698 | 140 | 0.577885 |
__copyright__ = """
MIT License
Copyright (c) 2021 Samapriya Roy
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__license__ = "MIT License"
import requests
import json
import sys
import pkg_resources
import argparse
import time
import csv
import getpass
import os
import pytz
from itertools import groupby
from dateutil import parser
from os.path import expanduser
from bs4 import BeautifulSoup
from timezonefinder import TimezoneFinder
class Solution:
    """Dotted version-string comparator (LeetCode-style helper)."""

    def compareVersion(self, version1, version2):
        """Return 1 if version1 > version2, -1 if smaller, 0 if equal.

        Segments are compared numerically; missing segments count as 0,
        so "1.0" == "1.0.0".
        """
        left = [int(part) for part in version1.split(".")]
        right = [int(part) for part in version2.split(".")]
        for idx in range(max(len(left), len(right))):
            a = left[idx] if idx < len(left) else 0
            b = right[idx] if idx < len(right) else 0
            if a != b:
                return 1 if a > b else -1
        return 0


# Shared comparator instance used by the version check below.
ob1 = Solution()
def pyspotter_version():
    """Warn when the installed pyspotter version differs from the PyPI release.

    Scrapes the PyPI project page and compares the version in the first <h1>
    against the locally installed distribution. Network-dependent; assumes
    the PyPI heading keeps the "name version" layout. Prints a banner only.
    """
    url = "https://pypi.org/project/pyspotter/"
    source = requests.get(url)
    html_content = source.text
    soup = BeautifulSoup(html_content, "html.parser")
    # The first <h1> on a PyPI project page is "package-name X.Y.Z".
    company = soup.find("h1")
    vcheck = ob1.compareVersion(
        company.string.strip().split(" ")[-1],
        pkg_resources.get_distribution("pyspotter").version,
    )
    if vcheck == 1:
        # PyPI is newer: suggest an upgrade.
        print(
            "\n"
            + "========================================================================="
        )
        print(
            "Current version of pyspotter is {} upgrade to lastest version: {}".format(
                pkg_resources.get_distribution("pyspotter").version,
                company.string.strip().split(" ")[-1],
            )
        )
        print(
            "========================================================================="
        )
    elif vcheck == -1:
        # Local copy is ahead of PyPI: probably a dev/staging checkout.
        print(
            "\n"
            + "========================================================================="
        )
        print(
            "Possibly running staging code {} compared to pypi release {}".format(
                pkg_resources.get_distribution("pyspotter").version,
                company.string.strip().split(" ")[-1],
            )
        )
        print(
            "========================================================================="
        )


# NOTE: runs at import time, so every CLI invocation performs a network call.
pyspotter_version()
def auth(usr):
    """Log in to the Sofar Ocean API and cache the token in ``~/sofarocean.json``.

    Prompts for the email when ``usr`` is None and always prompts for the
    password. On failure prints the HTTP status and writes nothing.
    """
    # Browser-like headers expected by the weather.sofarocean.com login flow.
    headers = {
        "authority": "api.sofarocean.com",
        "sec-ch-ua": '" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"',
        "accept": "application/json, text/plain, */*",
        "sec-ch-ua-mobile": "?0",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
        "content-type": "application/x-www-form-urlencoded",
        "origin": "https://weather.sofarocean.com",
        "sec-fetch-site": "same-site",
        "sec-fetch-mode": "cors",
        "sec-fetch-dest": "empty",
        "referer": "https://weather.sofarocean.com/",
        "accept-language": "en-US,en;q=0.9",
    }
    home = expanduser("~/sofarocean.json")
    if usr is None:
        usr = input("Enter email: ")
    pwd = getpass.getpass("Enter password: ")
    data = {"username": usr, "password": pwd, "skipRedirect": "true"}
    response = requests.post(
        "https://api.sofarocean.com/login/", headers=headers, data=data
    )
    if response.status_code == 200:
        print("Authentication successful")
        # Persist only the token, never the password.
        data = {"token": response.json()["token"]}
        with open(home, "w") as outfile:
            json.dump(data, outfile)
    else:
        print(f"Authentication failed with error {response.status_code}")


def auth_from_parser(args):
    """argparse bridge for the ``auth`` subcommand."""
    auth(usr=args.username)
def reset():
    """Regenerate the API token for the given account and rewrite the cache.

    Reads the cached token from ``~/sofarocean.json``, authenticating first
    if the file does not exist.
    """
    home = expanduser("~/sofarocean.json")
    usr = input("Enter email: ")
    if not os.path.exists(home):
        auth(usr)
        # NOTE(review): if auth() fails it writes no file, so this open()
        # raises an unhandled FileNotFoundError — worth guarding.
        with open(home) as json_file:
            data = json.load(json_file)
            token = data.get("token")
    else:
        with open(home) as json_file:
            data = json.load(json_file)
            token = data.get("token")
    headers = {
        "token": token,
    }
    response = requests.post(
        f"https://api.sofarocean.com/users/{usr}/tokens/", headers=headers
    )
    if response.status_code == 200:
        print("Token reset successful")
        # Replace the cached token with the freshly issued one.
        data = {"token": response.json()["token"]}
        with open(home, "w") as outfile:
            json.dump(data, outfile)
    else:
        print("Token reset failed")


def reset_from_parser(args):
    """argparse bridge for the ``reset`` subcommand."""
    reset()
def tokenize():
    """Return the cached API token, authenticating first if no cache exists.

    The token lives in ``~/sofarocean.json`` as ``{"token": ...}``.
    """
    home = expanduser("~/sofarocean.json")
    if not os.path.exists(home):
        # First run: prompt for credentials and write the cache file.
        auth(usr=None)
    # Both branches of the original read the same file the same way, so a
    # single read after the optional auth() is behaviorally identical.
    with open(home) as json_file:
        return json.load(json_file).get("token")
def devlist():
    """Print the Spotter device IDs available to the authenticated account."""
    headers = {
        "token": tokenize(),
    }
    response = requests.get("https://api.sofarocean.com/api/devices", headers=headers)
    # NOTE(review): no status-code check — an expired token (401) makes the
    # ['message'] lookup below fail with an opaque KeyError.
    response = response.json()
    print(f"Total of {response['message']}" + "\n")
    for device in response["data"]["devices"]:
        print(device["spotterId"])


def devlist_from_parser(args):
    """argparse bridge for the ``devlist`` subcommand."""
    devlist()
def spot_check(spot_id):
    """Print the latest readings and local time for one Spotter buoy.

    ``spot_id`` may be given with or without the "SPOT-" prefix. Prints a
    JSON summary on success, or an error message on a non-200 response.
    """
    if not spot_id.startswith("SPOT-"):
        spot_id = f"SPOT-{spot_id}"
    dic = {}
    obj = TimezoneFinder()
    headers = {
        "token": tokenize(),
    }
    response = requests.get(
        f"https://api.sofarocean.com/api/latest-data?spotterId={spot_id}",
        headers=headers,
    )
    if response.status_code == 200:
        spotter = response.json()
        print(f"Fetching info for Spotter {spot_id}" + "\n")
        # Copy scalar fields only; position/time come from `waves` below.
        for key, value in spotter["data"].items():
            if key != "frequencyData" and key != "track" and key != "waves":
                dic[key] = value
        # NOTE(review): assumes data["waves"] is non-empty (IndexError
        # otherwise) and that timezone_at() does not return None over open
        # ocean (pytz.timezone would raise) — TODO confirm.
        latitude = spotter["data"]["waves"][-1]["latitude"]
        longitude = spotter["data"]["waves"][-1]["longitude"]
        time_zone = obj.timezone_at(lat=float(latitude), lng=float(longitude))
        tz = pytz.timezone(time_zone)
        now_utc = parser.parse(spotter["data"]["waves"][-1]["timestamp"])
        now_kl = now_utc.replace(tzinfo=pytz.utc).astimezone(tz)
        dic["last updated (UTC time)"] = str(now_utc)
        dic["last updated (spotter local time)"] = str(now_kl)
        dic["latitude"] = spotter["data"]["waves"][-1]["latitude"]
        dic["longitude"] = spotter["data"]["waves"][-1]["longitude"]
        print(json.dumps(dic, indent=2, sort_keys=False))
    else:
        print(
            f"Spot check failed with error code {response.status_code}: {response.json()['message']}"
        )


def spotcheck_from_parser(args):
    """argparse bridge for the ``spot-check`` subcommand."""
    spot_check(spot_id=args.sid)
def spot_data(spot_id, dtype, folder):
    """Export Spotter readings of one data type to per-day CSV files.

    Args:
        spot_id: Spotter ID, with or without the "SPOT-" prefix.
        dtype: One of "wave", "wind" or "sst".
        folder: Existing directory that receives one CSV per UTC day, named
            ``{spot_id}_{date}_{dtype}.csv``.
    """
    waves_list = []
    wind_list = []
    sst_list = []
    if not spot_id.startswith("SPOT-"):
        spot_id = f"SPOT-{spot_id}"
    # NOTE(review): `obj` is never used in this function.
    obj = TimezoneFinder()
    params = {
        "spotterId": [spot_id],
        "includeSurfaceTempData": True,
        "includeWindData": True,
    }
    headers = {
        "token": tokenize(),
    }
    response = requests.get(
        "https://api.sofarocean.com/api/wave-data", headers=headers, params=params
    )
    if response.status_code == 200:
        spotter = response.json()
        print("\n" + f"Fetching info for Spotter {spot_id}" + "\n")
        # NOTE(review): `and` binds tighter than `or`, so these conditions read
        # `missing_key or (empty and dtype == X)`: a *missing* section aborts
        # regardless of the requested dtype, and when a key is missing but the
        # condition is False the else branch raises KeyError. Intended form is
        # `(missing or empty) and dtype == X`.
        if (
            not "surfaceTemp" in spotter["data"]
            or len(spotter["data"]["surfaceTemp"]) == 0
            and dtype == "sst"
        ):
            sys.exit("No surfaceTemp data found")
        else:
            # Tag each reading with its UTC date and source spotter.
            for readings in spotter["data"]["surfaceTemp"]:
                readings["date"] = readings["timestamp"].split("T")[0]
                readings["spotter_id"] = spot_id
                sst_list.append(readings)
        if (
            not "waves" in spotter["data"]
            or len(spotter["data"]["waves"]) == 0
            and dtype == "wave"
        ):
            sys.exit("No waves data found")
        else:
            for readings in spotter["data"]["waves"]:
                readings["date"] = readings["timestamp"].split("T")[0]
                readings["spotter_id"] = spot_id
                waves_list.append(readings)
        if (
            not "wind" in spotter["data"]
            or len(spotter["data"]["wind"]) == 0
            and dtype == "wind"
        ):
            sys.exit("No wind data found")
        else:
            for readings in spotter["data"]["wind"]:
                readings["date"] = readings["timestamp"].split("T")[0]
                readings["spotter_id"] = spot_id
                wind_list.append(readings)
    else:
        sys.exit(
            f"Failed with status_code: {response.status_code}: {response.json()['message']}"
        )
    # Select the CSV column layout and data list for the requested dtype.
    # NOTE(review): an unrecognized dtype falls through and `main_list` is
    # never bound (NameError below).
    if dtype == "wave":
        csv_columns = [
            "significantWaveHeight",
            "peakPeriod",
            "meanPeriod",
            "peakDirection",
            "peakDirectionalSpread",
            "meanDirection",
            "meanDirectionalSpread",
            "timestamp",
            "latitude",
            "longitude",
            "date",
            "spotter_id",
        ]
        main_list = waves_list
    elif dtype == "wind":
        csv_columns = [
            "speed",
            "direction",
            "seasurfaceId",
            "latitude",
            "longitude",
            "timestamp",
            "date",
            "spotter_id",
        ]
        main_list = wind_list
    elif dtype == "sst":
        csv_columns = [
            "degrees",
            "latitude",
            "longitude",
            "timestamp",
            "date",
            "spotter_id",
        ]
        main_list = sst_list
    # Group key for the per-day CSV split.
    def key_func(k):
        return k["date"]
    # groupby requires input sorted by the same key.
    INFO = sorted(main_list, key=key_func)
    for key, value in groupby(INFO, key_func):
        print(f"Processing {spot_id}_{key}_{dtype}.csv")
        dict_data = list(value)
        try:
            with open(
                os.path.join(folder, f"{spot_id}_{key}_{dtype}.csv"), "w"
            ) as csvfile:
                writer = csv.DictWriter(
                    csvfile, fieldnames=csv_columns, delimiter=",", lineterminator="\n"
                )
                writer.writeheader()
                for data in dict_data:
                    writer.writerow(data)
        except IOError:
            print("I/O error")


def spot_data_from_parser(args):
    """argparse bridge for the ``spot-data`` subcommand."""
    spot_data(spot_id=args.sid, dtype=args.dtype, folder=args.folder)
def main(args=None):
    """Entry point for the Sofarocean CLI.

    Parameters
    ----------
    args : list of str, optional
        Argument vector to parse. When ``None`` (the default), argparse
        falls back to ``sys.argv[1:]``.

    Raises
    ------
    SystemExit
        Via ``parser.error`` when no subcommand is given, or on argparse
        validation failures.
    """
    parser = argparse.ArgumentParser(description="Simple CLI for Sofarocean API")
    subparsers = parser.add_subparsers()

    # auth: authenticate and persist an API token.
    parser_auth = subparsers.add_parser(
        "auth", help="Authenticates and saves your API token"
    )
    optional_named = parser_auth.add_argument_group("Optional named arguments")
    optional_named.add_argument("--username", help="Username", default=None)
    parser_auth.set_defaults(func=auth_from_parser)

    # reset: regenerate the API token.
    parser_reset = subparsers.add_parser("reset", help="Regenerates your API token")
    parser_reset.set_defaults(func=reset_from_parser)

    # devlist: list devices on the account.
    parser_devlist = subparsers.add_parser(
        "devlist", help="Print lists of devices available under your account"
    )
    parser_devlist.set_defaults(func=devlist_from_parser)

    # spot-check: check a single Spotter's location and time.
    parser_spotcheck = subparsers.add_parser(
        "spot-check", help="Spot check a Spotter location and time"
    )
    required_named = parser_spotcheck.add_argument_group("Required named arguments.")
    required_named.add_argument("--sid", help="Spotter ID", required=True)
    parser_spotcheck.set_defaults(func=spotcheck_from_parser)

    # spot-data: export a Spotter's data as CSV, grouped by date.
    parser_spot_data = subparsers.add_parser(
        "spot-data", help="Export Spotter Data based on Spotter ID & grouped by date"
    )
    required_named = parser_spot_data.add_argument_group("Required named arguments.")
    required_named.add_argument("--sid", help="Spotter ID", required=True)
    required_named.add_argument(
        "--dtype", help="Data type: wind/wave/sst", required=True
    )
    required_named.add_argument(
        "--folder", help="Folder to export CSV data", required=True
    )
    parser_spot_data.set_defaults(func=spot_data_from_parser)

    # Bug fix: parse_args() previously ignored the `args` parameter, so a
    # programmatic call like main(["devlist"]) silently parsed sys.argv
    # instead. Passing `args` through preserves the CLI behavior (None ->
    # sys.argv[1:]) while making programmatic invocation work.
    args = parser.parse_args(args)
    try:
        func = args.func
    except AttributeError:
        # No subcommand selected; add_subparsers() does not enforce this.
        parser.error("too few arguments")
    func(args)
# Script entry point: dispatch to the CLI defined in main().
if __name__ == "__main__":
    main()
| true | true |
f7fe0b03fd919a0f12cea97eadbe8bef68a9152d | 5,179 | py | Python | methods/gan/image_translator.py | MendelXu/ANN | f4eabeb27dbba5c9bdcf83d03776bffa34995666 | [
"Apache-2.0"
] | 308 | 2019-08-11T02:12:37.000Z | 2022-03-30T07:20:41.000Z | methods/gan/image_translator.py | pinglmlcv/ANN | f4eabeb27dbba5c9bdcf83d03776bffa34995666 | [
"Apache-2.0"
] | 19 | 2019-08-22T04:57:33.000Z | 2022-03-27T10:59:23.000Z | methods/gan/image_translator.py | pinglmlcv/ANN | f4eabeb27dbba5c9bdcf83d03776bffa34995666 | [
"Apache-2.0"
] | 64 | 2019-08-17T07:09:50.000Z | 2022-03-27T11:23:39.000Z | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Donny You (youansheng@gmail.com)
# Class Definition for GAN.
import time
import torch
from datasets.gan.data_loader import DataLoader
from methods.tools.runner_helper import RunnerHelper
from methods.tools.trainer import Trainer
from models.gan.model_manager import ModelManager
from utils.tools.average_meter import AverageMeter
from utils.tools.logger import Logger as Log
class ImageTranslator(object):
    """
    Runner for GAN-based image translation: owns the GAN network, data
    loaders, optimizer and scheduler, and drives the train/val loop.
    (The original docstring said "Pose Estimation", which appears to be a
    copy-paste leftover from another runner.)
    """
    def __init__(self, configer):
        self.configer = configer
        # Running averages for timing and loss reporting.
        self.batch_time = AverageMeter()
        self.data_time = AverageMeter()
        self.train_losses = AverageMeter()
        self.val_losses = AverageMeter()
        self.model_manager = ModelManager(configer)
        self.seg_data_loader = DataLoader(configer)
        # Populated by _init_model() below.
        self.gan_net = None
        self.train_loader = None
        self.val_loader = None
        self.optimizer = None
        self.scheduler = None
        # Mutable run state ('iters', 'epoch'); presumably initialized by
        # RunnerHelper.load_net before train() increments it — TODO confirm.
        self.runner_state = dict()
        self._init_model()
    def _init_model(self):
        # Build the network, restore any checkpoint, then create the
        # optimizer/scheduler and the train/val data loaders.
        self.gan_net = self.model_manager.gan_model()
        self.gan_net = RunnerHelper.load_net(self, self.gan_net)
        self.optimizer, self.scheduler = Trainer.init(self._get_parameters(), self.configer.get('solver'))
        self.train_loader = self.seg_data_loader.get_trainloader()
        self.val_loader = self.seg_data_loader.get_valloader()
    def _get_parameters(self):
        # A single optimizer covers all network parameters.
        return self.gan_net.parameters()
    def train(self):
        """
        Run one training epoch over self.train_loader.
        """
        self.gan_net.train()
        start_time = time.time()
        # Solver state (e.g. learning rate) is refreshed every iteration via
        # Trainer.update (the old comment claiming per-epoch was misleading).
        for i, data_dict in enumerate(self.train_loader):
            Trainer.update(self, solver_dict=self.configer.get('solver'))
            # 'imgA' is used here only for its batch size when averaging loss.
            inputs = data_dict['imgA']
            self.data_time.update(time.time() - start_time)
            # Forward pass; the network returns its own loss in out_dict.
            out_dict = self.gan_net(data_dict)
            # .mean() folds per-replica losses when running data-parallel.
            loss = out_dict['loss'].mean()
            self.train_losses.update(loss.item(), inputs.size(0))
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()
            # Update the timing vars of the train phase.
            self.batch_time.update(time.time() - start_time)
            start_time = time.time()
            self.runner_state['iters'] += 1
            # Print the log info & reset the running meters.
            if self.runner_state['iters'] % self.configer.get('solver', 'display_iter') == 0:
                Log.info('Train Epoch: {0}\tTrain Iteration: {1}\t'
                         'Time {batch_time.sum:.3f}s / {2}iters, ({batch_time.avg:.3f})\t'
                         'Data load {data_time.sum:.3f}s / {2}iters, ({data_time.avg:3f})\n'
                         'Learning rate = {3}\tLoss = {loss.val:.8f} (ave = {loss.avg:.8f})\n'.format(
                    self.runner_state['epoch'], self.runner_state['iters'],
                    self.configer.get('solver', 'display_iter'),
                    RunnerHelper.get_lr(self.optimizer), batch_time=self.batch_time,
                    data_time=self.data_time, loss=self.train_losses))
                self.batch_time.reset()
                self.data_time.reset()
                self.train_losses.reset()
            # Stop mid-epoch once the iteration budget is exhausted (only
            # when the LR schedule is iteration-based).
            if self.configer.get('solver', 'lr')['metric'] == 'iters' \
                    and self.runner_state['iters'] == self.configer.get('solver', 'max_iters'):
                break
            # Check to val the current model.
            if self.runner_state['iters'] % self.configer.get('solver', 'test_interval') == 0:
                self.val()
        self.runner_state['epoch'] += 1
    def val(self, data_loader=None):
        """
        Validate on data_loader (defaults to self.val_loader), save the
        network checkpoint, log stats, and restore train mode.
        """
        self.gan_net.eval()
        start_time = time.time()
        data_loader = self.val_loader if data_loader is None else data_loader
        for j, data_dict in enumerate(data_loader):
            inputs = data_dict['imgA']
            with torch.no_grad():
                # Forward pass.
                out_dict = self.gan_net(data_dict)
                # Compute the loss of the val batch.
                self.val_losses.update(out_dict['loss'].mean().item(), inputs.size(0))
            # Update the vars of the val phase.
            self.batch_time.update(time.time() - start_time)
            start_time = time.time()
        RunnerHelper.save_net(self, self.gan_net,
                              val_loss=self.val_losses.avg)
        # Print the log info & reset the states.
        Log.info(
            'Test Time {batch_time.sum:.3f}s, ({batch_time.avg:.3f})\t'
            'Loss {loss.avg:.8f}\n'.format(
                batch_time=self.batch_time, loss=self.val_losses))
        self.batch_time.reset()
        self.val_losses.reset()
        self.gan_net.train()
if __name__ == "__main__":
    # Placeholder for a manual smoke test. (The original comment said
    # "pose estimator", which looks like a copy-paste from another runner;
    # this module implements the GAN image translator.)
    pass
| 36.992857 | 106 | 0.592585 |
import time
import torch
from datasets.gan.data_loader import DataLoader
from methods.tools.runner_helper import RunnerHelper
from methods.tools.trainer import Trainer
from models.gan.model_manager import ModelManager
from utils.tools.average_meter import AverageMeter
from utils.tools.logger import Logger as Log
class ImageTranslator(object):
    """
    GAN image-translation runner: builds the network, data loaders and
    optimizer, then drives the training and validation loops.
    """
    def __init__(self, configer):
        """Store config, create stat meters, and initialize the model."""
        self.configer = configer
        # Running averages for timing and loss reporting.
        self.batch_time = AverageMeter()
        self.data_time = AverageMeter()
        self.train_losses = AverageMeter()
        self.val_losses = AverageMeter()
        self.model_manager = ModelManager(configer)
        self.seg_data_loader = DataLoader(configer)
        # Populated by _init_model() below.
        self.gan_net = None
        self.train_loader = None
        self.val_loader = None
        self.optimizer = None
        self.scheduler = None
        # Mutable run state ('iters', 'epoch'); presumably initialized by
        # RunnerHelper.load_net before train() increments it — TODO confirm.
        self.runner_state = dict()
        self._init_model()
    def _init_model(self):
        """Build/restore the GAN net and create optimizer and data loaders."""
        self.gan_net = self.model_manager.gan_model()
        self.gan_net = RunnerHelper.load_net(self, self.gan_net)
        self.optimizer, self.scheduler = Trainer.init(self._get_parameters(), self.configer.get('solver'))
        self.train_loader = self.seg_data_loader.get_trainloader()
        self.val_loader = self.seg_data_loader.get_valloader()
    def _get_parameters(self):
        """Return all trainable parameters of the GAN network."""
        return self.gan_net.parameters()
    def train(self):
        """Run one training epoch over self.train_loader."""
        self.gan_net.train()
        start_time = time.time()
        for i, data_dict in enumerate(self.train_loader):
            # Refresh solver state (e.g. learning rate) each iteration.
            Trainer.update(self, solver_dict=self.configer.get('solver'))
            inputs = data_dict['imgA']  # used only for its batch size below
            self.data_time.update(time.time() - start_time)
            # Forward pass; the network returns its own loss in out_dict.
            out_dict = self.gan_net(data_dict)
            loss = out_dict['loss'].mean()
            self.train_losses.update(loss.item(), inputs.size(0))
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()
            self.batch_time.update(time.time() - start_time)
            start_time = time.time()
            self.runner_state['iters'] += 1
            # Periodic logging; meters reset after every report.
            if self.runner_state['iters'] % self.configer.get('solver', 'display_iter') == 0:
                Log.info('Train Epoch: {0}\tTrain Iteration: {1}\t'
                         'Time {batch_time.sum:.3f}s / {2}iters, ({batch_time.avg:.3f})\t'
                         'Data load {data_time.sum:.3f}s / {2}iters, ({data_time.avg:3f})\n'
                         'Learning rate = {3}\tLoss = {loss.val:.8f} (ave = {loss.avg:.8f})\n'.format(
                    self.runner_state['epoch'], self.runner_state['iters'],
                    self.configer.get('solver', 'display_iter'),
                    RunnerHelper.get_lr(self.optimizer), batch_time=self.batch_time,
                    data_time=self.data_time, loss=self.train_losses))
                self.batch_time.reset()
                self.data_time.reset()
                self.train_losses.reset()
            # Early exit once the iteration budget is reached (iters-based LR).
            if self.configer.get('solver', 'lr')['metric'] == 'iters' \
                    and self.runner_state['iters'] == self.configer.get('solver', 'max_iters'):
                break
            # Periodic validation.
            if self.runner_state['iters'] % self.configer.get('solver', 'test_interval') == 0:
                self.val()
        self.runner_state['epoch'] += 1
    def val(self, data_loader=None):
        """Validate (defaults to self.val_loader), save the net, log stats."""
        self.gan_net.eval()
        start_time = time.time()
        data_loader = self.val_loader if data_loader is None else data_loader
        for j, data_dict in enumerate(data_loader):
            inputs = data_dict['imgA']
            with torch.no_grad():
                out_dict = self.gan_net(data_dict)
                self.val_losses.update(out_dict['loss'].mean().item(), inputs.size(0))
            self.batch_time.update(time.time() - start_time)
            start_time = time.time()
        RunnerHelper.save_net(self, self.gan_net,
                              val_loss=self.val_losses.avg)
        Log.info(
            'Test Time {batch_time.sum:.3f}s, ({batch_time.avg:.3f})\t'
            'Loss {loss.avg:.8f}\n'.format(
                batch_time=self.batch_time, loss=self.val_losses))
        self.batch_time.reset()
        self.val_losses.reset()
        # Restore train mode so a subsequent train() epoch proceeds normally.
        self.gan_net.train()
# Placeholder entry point; this module has no standalone behavior.
if __name__ == "__main__":
    pass
| true | true |
f7fe0b195c3d2f8089fb534e7264eaa1cf3c0fd3 | 69,204 | py | Python | intersight/model/iam_resource_limits_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 5 | 2021-12-16T15:13:32.000Z | 2022-03-29T16:09:54.000Z | intersight/model/iam_resource_limits_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 4 | 2022-01-25T19:05:51.000Z | 2022-03-29T20:18:37.000Z | intersight/model/iam_resource_limits_relationship.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 2 | 2020-07-07T15:01:08.000Z | 2022-01-31T04:27:35.000Z | """
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import referenced model classes on demand.

    Deferring these imports until call time (and publishing the classes via
    ``globals()``) sidesteps circular-import problems between the generated
    model modules.
    """
    from intersight.model.display_names import DisplayNames
    from intersight.model.iam_account_relationship import IamAccountRelationship
    from intersight.model.iam_resource_limits import IamResourceLimits
    from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
    from intersight.model.mo_mo_ref import MoMoRef
    from intersight.model.mo_tag import MoTag
    from intersight.model.mo_version_context import MoVersionContext
    module_ns = globals()
    for name, model_cls in (
        ('DisplayNames', DisplayNames),
        ('IamAccountRelationship', IamAccountRelationship),
        ('IamResourceLimits', IamResourceLimits),
        ('MoBaseMoRelationship', MoBaseMoRelationship),
        ('MoMoRef', MoMoRef),
        ('MoTag', MoTag),
        ('MoVersionContext', MoVersionContext),
    ):
        module_ns[name] = model_cls
class IamResourceLimitsRelationship(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('per_account_user_limit',): {
'inclusive_maximum': 200,
'inclusive_minimum': 1,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
    @cached_property
    def openapi_types():
        """Map each pythonic attribute name to its accepted OpenAPI type(s).

        This must be a method (exposed via cached_property) because a model
        may have properties whose types refer to classes that only exist
        after lazy_import() runs, i.e. after the class is loaded.

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'class_id': (str,),  # noqa: E501
            'moid': (str,),  # noqa: E501
            'selector': (str,),  # noqa: E501
            'link': (str,),  # noqa: E501
            'account_moid': (str,),  # noqa: E501
            'create_time': (datetime,),  # noqa: E501
            'domain_group_moid': (str,),  # noqa: E501
            'mod_time': (datetime,),  # noqa: E501
            'owners': ([str], none_type,),  # noqa: E501
            'shared_scope': (str,),  # noqa: E501
            'tags': ([MoTag], none_type,),  # noqa: E501
            'version_context': (MoVersionContext,),  # noqa: E501
            'ancestors': ([MoBaseMoRelationship], none_type,),  # noqa: E501
            'parent': (MoBaseMoRelationship,),  # noqa: E501
            'permission_resources': ([MoBaseMoRelationship], none_type,),  # noqa: E501
            'display_names': (DisplayNames,),  # noqa: E501
            'per_account_user_limit': (int,),  # noqa: E501
            'account': (IamAccountRelationship,),  # noqa: E501
            'object_type': (str,),  # noqa: E501
        }
@cached_property
def discriminator():
lazy_import()
val = {
'iam.ResourceLimits': IamResourceLimits,
'mo.MoRef': MoMoRef,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'moid': 'Moid', # noqa: E501
'selector': 'Selector', # noqa: E501
'link': 'link', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
'per_account_user_limit': 'PerAccountUserLimit', # noqa: E501
'account': 'Account', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """IamResourceLimitsRelationship - a model defined in OpenAPI

        Args:

        Keyword Args:
            class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            moid (str): The Moid of the referenced REST resource.. [optional]  # noqa: E501
            selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional]  # noqa: E501
            link (str): A URL to an instance of the 'mo.MoRef' class.. [optional]  # noqa: E501
            account_moid (str): The Account ID for this managed object.. [optional]  # noqa: E501
            create_time (datetime): The time when this managed object was created.. [optional]  # noqa: E501
            domain_group_moid (str): The DomainGroup ID for this managed object.. [optional]  # noqa: E501
            mod_time (datetime): The time when this managed object was last modified.. [optional]  # noqa: E501
            owners ([str], none_type): [optional]  # noqa: E501
            shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional]  # noqa: E501
            tags ([MoTag], none_type): [optional]  # noqa: E501
            version_context (MoVersionContext): [optional]  # noqa: E501
            ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional]  # noqa: E501
            parent (MoBaseMoRelationship): [optional]  # noqa: E501
            permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional]  # noqa: E501
            display_names (DisplayNames): [optional]  # noqa: E501
            per_account_user_limit (int): The maximum number of users allowed in an account. The default value is 200.. [optional]  # noqa: E501
            account (IamAccountRelationship): [optional]  # noqa: E501
            object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional]  # noqa: E501
        """

        # class_id is read with get() (not pop()) so it stays in kwargs and is
        # therefore also forwarded to the composed (oneOf) model constructors.
        class_id = kwargs.get('class_id', "mo.MoRef")
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Composed models accept keyword arguments only; positional arguments
        # are ambiguous across the oneOf alternatives and are rejected.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class so discriminator resolution is not applied to it
        # again while traversing the composed schemas.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Arguments forwarded unchanged to every composed model constructor.
        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        required_args = {
            'class_id': class_id,
        }
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Instantiate the oneOf component models and learn which variable
        # names belong to which component instance(s).
        composed_info = validate_get_composed_info(
            constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in kwargs.items():
            if var_name in unused_args and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    not self._additional_properties_model_instances:
                # The key is unknown to every component schema, the
                # configuration asks to drop unknown keys, and there is no
                # additionalProperties schema to absorb it: discard it.
                continue
            setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error beause the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
IamResourceLimits,
MoMoRef,
none_type,
],
}
| 62.289829 | 1,678 | 0.658907 |
import re
import sys
from intersight.model_utils import (
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    """Import the referenced model classes and publish them into this
    module's namespace.

    Deferring these imports until first use (instead of importing at module
    top level) avoids the circular-import cycles between the generated
    ``intersight.model.*`` modules.
    """
    from intersight.model.display_names import DisplayNames
    from intersight.model.iam_account_relationship import IamAccountRelationship
    from intersight.model.iam_resource_limits import IamResourceLimits
    from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
    from intersight.model.mo_mo_ref import MoMoRef
    from intersight.model.mo_tag import MoTag
    from intersight.model.mo_version_context import MoVersionContext
    # One bulk update instead of seven individual globals()[...] assignments.
    globals().update(
        DisplayNames=DisplayNames,
        IamAccountRelationship=IamAccountRelationship,
        IamResourceLimits=IamResourceLimits,
        MoBaseMoRelationship=MoBaseMoRelationship,
        MoMoRef=MoMoRef,
        MoTag=MoTag,
        MoVersionContext=MoVersionContext,
    )
class IamResourceLimitsRelationship(ModelComposed):
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('per_account_user_limit',): {
'inclusive_maximum': 200,
'inclusive_minimum': 1,
},
}
    @cached_property
    def additional_properties_type():
        """Tuple of python types accepted for additional (undeclared) properties.

        lazy_import() is called first to defer circular model imports until
        the first access of this cached property.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)
    # Whether the model value itself may be None per the OpenAPI schema.
    _nullable = False
    @cached_property
    def openapi_types():
        """Map each python attribute name to its tuple of accepted types.

        Used by the generated validation machinery; lazy_import() defers
        circular model imports until first access.
        """
        lazy_import()
        return {
            'class_id': (str,),
            'moid': (str,),
            'selector': (str,),
            'link': (str,),
            'account_moid': (str,),
            'create_time': (datetime,),
            'domain_group_moid': (str,),
            'mod_time': (datetime,),
            'owners': ([str], none_type,),
            'shared_scope': (str,),
            'tags': ([MoTag], none_type,),
            'version_context': (MoVersionContext,),
            'ancestors': ([MoBaseMoRelationship], none_type,),
            'parent': (MoBaseMoRelationship,),
            'permission_resources': ([MoBaseMoRelationship], none_type,),
            'display_names': (DisplayNames,),
            'per_account_user_limit': (int,),
            'account': (IamAccountRelationship,),
            'object_type': (str,),
        }
@cached_property
def discriminator():
lazy_import()
val = {
'iam.ResourceLimits': IamResourceLimits,
'mo.MoRef': MoMoRef,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId',
'moid': 'Moid',
'selector': 'Selector',
'link': 'link',
'account_moid': 'AccountMoid',
'create_time': 'CreateTime',
'domain_group_moid': 'DomainGroupMoid',
'mod_time': 'ModTime',
'owners': 'Owners',
'shared_scope': 'SharedScope',
'tags': 'Tags',
'version_context': 'VersionContext',
'ancestors': 'Ancestors',
'parent': 'Parent',
'permission_resources': 'PermissionResources',
'display_names': 'DisplayNames',
'per_account_user_limit': 'PerAccountUserLimit',
'account': 'Account',
'object_type': 'ObjectType',
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct the model from keyword arguments only.

        Positional arguments are rejected; '_'-prefixed kwargs are internal
        validation controls popped off before composed-schema resolution.
        """
        # discriminator default: "mo.MoRef" when the caller does not specify
        class_id = kwargs.get('class_id', "mo.MoRef")
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # record this class to break cycles when resolving composed schemas
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        constant_args = {
            '_check_type': _check_type,
            '_path_to_item': _path_to_item,
            '_spec_property_naming': _spec_property_naming,
            '_configuration': _configuration,
            '_visited_composed_classes': self._visited_composed_classes,
        }
        required_args = {
            'class_id': class_id,
        }
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # resolve oneOf/anyOf/allOf composition into concrete model instances
        composed_info = validate_get_composed_info(
            constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]
        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in kwargs.items():
            # drop unknown keys when configured to, unless an
            # additional-properties model can absorb them
            if var_name in unused_args and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    not self._additional_properties_model_instances:
                continue
            setattr(self, var_name, var_value)
    @cached_property
    def _composed_schemas():
        """Return the oneOf/anyOf/allOf composition for this schema.

        This model is a oneOf over IamResourceLimits, MoMoRef, or null.
        """
        # deferred to avoid circular imports at module load
        lazy_import()
        return {
          'anyOf': [
          ],
          'allOf': [
          ],
          'oneOf': [
              IamResourceLimits,
              MoMoRef,
              none_type,
          ],
        }
| true | true |
f7fe0b7eb8236f1edff4d62040437bd3b9cf81d9 | 44 | py | Python | privugger/test/addition.py | itu-square/reident | a9f2a2cfb43ea0adeccbbed7ef119f5eae243bf5 | [
"Apache-2.0"
] | 2 | 2021-12-10T13:45:37.000Z | 2021-12-15T08:32:01.000Z | privugger/test/addition.py | itu-square/reident | a9f2a2cfb43ea0adeccbbed7ef119f5eae243bf5 | [
"Apache-2.0"
] | 39 | 2021-03-24T10:08:50.000Z | 2022-03-29T22:02:24.000Z | unit-test/addition.py | itu-square/privugger | 9b57605dbd1ed072feaedc17ca0cd688dbf2459a | [
"Apache-2.0"
] | null | null | null | def name(age, height):
return age+height | 22 | 22 | 0.704545 | def name(age, height):
return age+height | true | true |
f7fe0ba9f2f4cc700800ad27d22b0e0995d7ac32 | 604 | py | Python | python_practice/random_number.py | vishalvb/practice | 4c38f863408c91aa072bd20510f098043fecd043 | [
"MIT"
] | null | null | null | python_practice/random_number.py | vishalvb/practice | 4c38f863408c91aa072bd20510f098043fecd043 | [
"MIT"
] | null | null | null | python_practice/random_number.py | vishalvb/practice | 4c38f863408c91aa072bd20510f098043fecd043 | [
"MIT"
] | null | null | null | #random number generation
import random
# Uniform float in [0.0, 1.0)
value = random.random()
print('random value is ', value)
# Uniform float in [1, 10]
value = random.uniform(1,10)
print('using random.uniform', value)
# Uniform integer in [1, 6], like a die roll
value = random.randint(1,6)
print('using random.int', value)
# Pick one element at random
my_list = ['hi', 'hello', 'how areyou', 'welcome']
print('using random.choice', random.choice(my_list))
# Pick k elements with replacement; equal weights here
colors = ['red', 'green', 'blue']
print('using random.choices', random.choices(colors, weights = [1,1,1], k=10))
# A 52-card deck as the integers 1..52
deck = list(range(1,53))
print('deck is', deck)
# In-place shuffle
random.shuffle(deck)
print('after shuffle', deck)
print('using random.sample', random.sample(deck, k = 5)) | 24.16 | 78 | 0.695364 |
import random
# Uniform float in [0.0, 1.0)
value = random.random()
print('random value is ', value)
# Uniform float in [1, 10]
value = random.uniform(1,10)
print('using random.uniform', value)
# Uniform integer in [1, 6]
value = random.randint(1,6)
print('using random.int', value)
# Pick one element at random
my_list = ['hi', 'hello', 'how areyou', 'welcome']
print('using random.choice', random.choice(my_list))
# Pick k elements with replacement; equal weights here
colors = ['red', 'green', 'blue']
print('using random.choices', random.choices(colors, weights = [1,1,1], k=10))
# A 52-card deck as the integers 1..52
deck = list(range(1,53))
print('deck is', deck)
# In-place shuffle
random.shuffle(deck)
print('after shuffle', deck)
print('using random.sample', random.sample(deck, k = 5)) | true | true |
f7fe0c7dd1384b5de481aeb6cbed455df73fa3c9 | 3,317 | py | Python | server/endpoints/point/monthly.py | meteostat/meteostat-server | fecb4acab34ce97121a7c9a16e3ca1b4ffb55b7a | [
"MIT"
] | 3 | 2021-04-11T03:28:45.000Z | 2022-02-03T19:55:56.000Z | server/endpoints/point/monthly.py | meteostat/meteostat-server | fecb4acab34ce97121a7c9a16e3ca1b4ffb55b7a | [
"MIT"
] | 1 | 2021-09-21T12:45:07.000Z | 2021-09-23T08:53:56.000Z | server/endpoints/point/monthly.py | meteostat/meteostat-server | fecb4acab34ce97121a7c9a16e3ca1b4ffb55b7a | [
"MIT"
] | null | null | null | """
Meteostat JSON API Server
The code is licensed under the MIT license.
"""
from datetime import datetime
import json
from flask import abort
from meteostat import Point, Monthly, units
from server import app, utils
"""
Meteostat configuration
"""
Point.radius = 120000
Monthly.threads = 4
Monthly.autoclean = False
"""
Endpoint configuration
"""
# Query parameters accepted by this endpoint, as (name, type, default)
# tuples consumed by utils.get_parameters().
parameters = [
    ('lat', float, None),
    ('lon', float, None),
    ('alt', int, None),
    ('start', str, None),
    ('end', str, None),
    ('model', bool, True),
    ('freq', str, None),
    ('units', str, None)
]
@app.route('/point/monthly')
def point_monthly():
    """
    Return monthly point data in JSON format.

    Requires lat, lon and ISO 'YYYY-MM-DD' start/end query parameters;
    responds 400 on missing/invalid input, otherwise a JSON document
    {"meta": {...}, "data": [...]} with cache headers.
    """
    # Get query parameters
    args = utils.get_parameters(parameters)

    # Check required parameters. Guard start/end against None before len():
    # previously a missing date parameter raised TypeError outside the
    # try block and surfaced as HTTP 500 instead of 400.
    if (args['lat'] and args['lon']
            and args['start'] and len(args['start']) == 10
            and args['end'] and len(args['end']) == 10):

        try:

            # Convert start & end date strings to datetime
            start = datetime.strptime(args['start'], '%Y-%m-%d')
            end = datetime.strptime(f'{args["end"]} 23:59:59', '%Y-%m-%d %H:%M:%S')

            # Get number of days between start and end date
            date_diff = (end - start).days

            # Check date range
            if date_diff < 0:
                # Bad request
                abort(400)

            # Caching: recent data changes more often, so cache it for a
            # shorter period (one week vs. one month)
            now_diff = (datetime.now() - end).days

            if now_diff < 90:
                cache_time = 60 * 60 * 24 * 7
            else:
                cache_time = 60 * 60 * 24 * 30

            Monthly.max_age = cache_time

            # Create a point
            location = Point(args['lat'], args['lon'], args['alt'])

            # Get data
            data = Monthly(location, start, end, model=args['model'])

            # Check if any data
            if data.count() > 0:

                # Normalize data
                data = data.normalize()

                # Aggregate
                if args['freq']:
                    data = data.aggregate(args['freq'])

                # Unit conversion
                if args['units'] == 'imperial':
                    data = data.convert(units.imperial)
                elif args['units'] == 'scientific':
                    data = data.convert(units.scientific)

                # Fetch DataFrame
                data = data.fetch()

                # Convert sunshine minutes to nullable integer
                data['tsun'] = data['tsun'].astype('Int64')

                # DateTime Index to String
                data.index = data.index.strftime('%Y-%m-%d')
                data.index.rename('date', inplace=True)
                data = data.reset_index().to_json(orient="records")

            else:

                # No data
                data = '[]'

            # Inject meta data
            meta = {}
            meta['generated'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            meta['stations'] = location.stations.to_list()

            # Generate output string
            output = f'''{{"meta":{json.dumps(meta)},"data":{data}}}'''

            # Return
            return utils.send_response(output, cache_time)

        except Exception:
            # Narrowed from BaseException so SystemExit/KeyboardInterrupt
            # are not swallowed; any processing error maps to HTTP 400.
            abort(400)

    else:

        # Bad request
        abort(400)
| 25.128788 | 83 | 0.503166 |
from datetime import datetime
import json
from flask import abort
from meteostat import Point, Monthly, units
from server import app, utils
Point.radius = 120000
Monthly.threads = 4
Monthly.autoclean = False
parameters = [
('lat', float, None),
('lon', float, None),
('alt', int, None),
('start', str, None),
('end', str, None),
('model', bool, True),
('freq', str, None),
('units', str, None)
]
@app.route('/point/monthly')
def point_monthly():
    """Return monthly point data in JSON format for lat/lon/start/end query args."""
    # parse and default the query parameters
    args = utils.get_parameters(parameters)
    # NOTE(review): if 'start' or 'end' is missing, len(None) raises TypeError
    # outside the try block -> HTTP 500 rather than 400; verify with caller.
    if args['lat'] and args['lon'] and len(
            args['start']) == 10 and len(args['end']) == 10:
        try:
            start = datetime.strptime(args['start'], '%Y-%m-%d')
            end = datetime.strptime(f'{args["end"]} 23:59:59', '%Y-%m-%d %H:%M:%S')
            date_diff = (end - start).days
            # reject inverted date ranges
            if date_diff < 0:
                abort(400)
            # recent data is cached one week; older data one month
            now_diff = (datetime.now() - end).days
            if now_diff < 90:
                cache_time = 60 * 60 * 24 * 7
            else:
                cache_time = 60 * 60 * 24 * 30
            Monthly.max_age = cache_time
            location = Point(args['lat'], args['lon'], args['alt'])
            data = Monthly(location, start, end, model=args['model'])
            if data.count() > 0:
                data = data.normalize()
                if args['freq']:
                    data = data.aggregate(args['freq'])
                if args['units'] == 'imperial':
                    data = data.convert(units.imperial)
                elif args['units'] == 'scientific':
                    data = data.convert(units.scientific)
                data = data.fetch()
                # sunshine minutes as nullable integer
                data['tsun'] = data['tsun'].astype('Int64')
                # serialize the index as date strings
                data.index = data.index.strftime('%Y-%m-%d')
                data.index.rename('date', inplace=True)
                data = data.reset_index().to_json(orient="records")
            else:
                data = '[]'
            meta = {}
            meta['generated'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            meta['stations'] = location.stations.to_list()
            output = f'''{{"meta":{json.dumps(meta)},"data":{data}}}'''
            return utils.send_response(output, cache_time)
        # NOTE(review): BaseException also swallows SystemExit and
        # KeyboardInterrupt; except Exception would be safer.
        except BaseException:
            abort(400)
    else:
        abort(400)
| true | true |
f7fe0d11cf148f60bef28141d143ceef06d812d5 | 1,277 | py | Python | corehq/apps/cloudcare/tests/test_dbaccessors.py | rochakchauhan/commcare-hq | aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/cloudcare/tests/test_dbaccessors.py | rochakchauhan/commcare-hq | aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/cloudcare/tests/test_dbaccessors.py | rochakchauhan/commcare-hq | aa7ab3c2d0c51fe10f2b51b08101bb4b5a376236 | [
"BSD-3-Clause"
] | null | null | null | from django.test import TestCase
from corehq.apps.app_manager.signals import app_post_save
from corehq.apps.app_manager.tests.app_factory import AppFactory
from corehq.apps.cloudcare.dbaccessors import get_application_access_for_domain
from corehq.apps.cloudcare.models import ApplicationAccess
from corehq.util.context_managers import drop_connected_signals
class DBAccessorsTest(TestCase):
def test_get_application_access_for_domain(self):
application_access_objects = []
domain = 'application-access-dbaccessors'
try:
self.assertIsNone(get_application_access_for_domain(domain))
o = ApplicationAccess(domain=domain)
o.save()
application_access_objects.append(o)
self.assertEqual(
o.to_json(),
get_application_access_for_domain(domain).to_json()
)
o = ApplicationAccess(domain=domain)
o.save()
application_access_objects.append(o)
self.assertIn(
get_application_access_for_domain(domain).to_json(),
[o.to_json() for o in application_access_objects]
)
finally:
ApplicationAccess.get_db().bulk_delete(application_access_objects)
| 38.69697 | 79 | 0.689115 | from django.test import TestCase
from corehq.apps.app_manager.signals import app_post_save
from corehq.apps.app_manager.tests.app_factory import AppFactory
from corehq.apps.cloudcare.dbaccessors import get_application_access_for_domain
from corehq.apps.cloudcare.models import ApplicationAccess
from corehq.util.context_managers import drop_connected_signals
class DBAccessorsTest(TestCase):
def test_get_application_access_for_domain(self):
application_access_objects = []
domain = 'application-access-dbaccessors'
try:
self.assertIsNone(get_application_access_for_domain(domain))
o = ApplicationAccess(domain=domain)
o.save()
application_access_objects.append(o)
self.assertEqual(
o.to_json(),
get_application_access_for_domain(domain).to_json()
)
o = ApplicationAccess(domain=domain)
o.save()
application_access_objects.append(o)
self.assertIn(
get_application_access_for_domain(domain).to_json(),
[o.to_json() for o in application_access_objects]
)
finally:
ApplicationAccess.get_db().bulk_delete(application_access_objects)
| true | true |
f7fe0d39d149cc56dd47b3fe842830d2925e98d8 | 13,567 | py | Python | numpy_mpi_numexpr/cfd_mpi_ne.py | JiahuaZhao/HPC-Python-CFD | 4fe4db053566603232bf16bdd06f8207cdadde0a | [
"MIT"
] | null | null | null | numpy_mpi_numexpr/cfd_mpi_ne.py | JiahuaZhao/HPC-Python-CFD | 4fe4db053566603232bf16bdd06f8207cdadde0a | [
"MIT"
] | null | null | null | numpy_mpi_numexpr/cfd_mpi_ne.py | JiahuaZhao/HPC-Python-CFD | 4fe4db053566603232bf16bdd06f8207cdadde0a | [
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# CFD Calculation with MPI4PY
# ===============
#
# Simulation of inviscid flow in a 2D box using the Jacobi algorithm.
#
# Python version - uses numpy and loops
#
# Alejandro Dinkelberg
#
import os
import sys
#import mkl
import time
import mpi4py.MPI as MPI
# Import numpy
import numpy as np
import numexpr as ne
from copy import deepcopy
os.environ['NUMEXPR_MAX_THREADS'] = '128'
ne.set_num_threads(2)
#mkl.set_num_threads(128)
#ne.set_vml_num_threads(128)
#ne.set_vml_accuracy_mode('fast')
##################################################################################################################################################################
# boundary and haloSWAP
def boundarypsi(psi, m, n, b, h, w, comm):
    """Apply streamfunction boundary conditions on this rank's slab.

    The global i-range is decomposed across ranks in contiguous chunks of
    m rows; global row i maps to local row i - istart + 1.  b, h, w
    parameterize the box geometry (cf. bbase/hbase/wbase in main).
    psi is modified in place.
    """
    # initialize the std values MPI
    rank = comm.Get_rank()
    size = comm.Get_size()
    # global index range owned by this rank (inclusive)
    istart = m*rank + 1
    istop = istart + m - 1
    # BCs on bottom edge: psi ramps linearly from 0 up to w across [b+1, b+w)
    for i in range(b+1, b+w):
        if i >= istart and i <= istop:
            psi[i-istart+1][0] = i-b
    # ... and stays constant at w for the remainder of the bottom edge
    for i in range(b+w, m*size+1):
        if i >= istart and i <= istop:
            psi[i-istart+1][0] = w
    # BCS on RHS -- only the last rank owns the right-hand edge
    if rank == size-1:
        for j in range(1, h+1):
            psi[m+1][j] = w
        for j in range(h+1, h+w):
            psi[m+1][j]= w-j+h
def boundaryzet(zet, psi, m, n, comm):
    """Apply vorticity boundary conditions on the local (m+2) x (n+2) grid.

    Top/bottom rows are set on every rank; the left column only on rank 0
    and the right column only on the last rank, because the domain is
    decomposed along the i-direction.  zet is modified in place and
    returned for convenience.
    """
    rank = comm.Get_rank()
    size = comm.Get_size()
    # (removed dead locals istart/istop -- they were computed but never used)

    # set top/bottom BCs: zeta = 2 * (psi one cell in - psi on the boundary)
    zet[1:m+1, 0] = 2 * (psi[1:m+1, 1] - psi[1:m+1, 0])
    zet[1:m+1, n+1] = 2 * (psi[1:m+1, n] - psi[1:m+1, n+1])

    # Set left BCs (only the first rank owns the left edge)
    if 0 == rank:
        zet[0, 1:n+1] = 2 * (psi[1, 1:n+1] - psi[0, 1:n+1])

    # Set right BCs (only the last rank owns the right edge)
    if size-1 == rank:
        zet[m+1, 1:n+1] = 2 * (psi[m, 1:n+1] - psi[m+1, 1:n+1])

    return zet
def haloSWAP(x, lm, n, comm):
    """Exchange halo rows of the local (lm+2) x (n+2) array with neighbours.

    Row lm (last owned row) is sent right and received into row 0; row 1 is
    sent left and received into row lm+1.  End ranks do only one exchange;
    interior ranks use Sendrecv to pair the communications.
    """
    tag = 1
    status = MPI.Status()
    rank = comm.Get_rank()
    size = comm.Get_size()
    # no need to halo swap if serial:
    if size > 1:
        # send right boundaries and receive left ones
        if rank == 0:
            comm.Send(x[lm][1:n+1], rank+1, tag)
        elif rank == size-1:
            comm.Recv(x[0][1:n+1], rank-1, tag, status)
        else:
            comm.Sendrecv(x[lm][1:n+1], rank+1, tag, x[0][1:n+1], rank-1, tag, status)
        # send left boundary and receive right
        if rank == 0:
            comm.Recv(x[lm+1][1:n+1], rank+1, tag, status)
        elif rank == size-1:
            comm.Send(x[1][1:n+1], rank-1, tag)
        else:
            comm.Sendrecv(x[1][1:n+1], rank-1, tag, x[lm+1][1:n+1], rank+1, tag, status)
##################################################################################################################################################################
# util.py
def write_data(lm, n, scale, psi, velfile, colfile, comm):
    """Gather per-rank velocity/colour data and write velfile and colfile
    from rank 0.

    NOTE(review): this port looks buggy -- see inline notes; confirm
    against the reference C implementation before relying on the output.
    """
    # mpi essentials
    m = lm
    rank = comm.Get_rank()
    size = comm.Get_size()
    # calculate velocities (central differences of psi) and colour values
    vel = np.zeros((m,n, 2))
    rgb = np.zeros((m,n,3), dtype='i')
    print(psi)
    # NOTE(review): ranges stop at m-1/n-1, leaving the last row/column of
    # vel and rgb unset -- possible off-by-one vs the serial version.
    for i in range(0, m-1):
        for j in range(0, n-1):
            vel[i][j][0] = (psi[i+1][j+2]-psi[i+1][j])/2.0
            vel[i][j][1] = -(psi[i+2][j+1]-psi[i][j+1])/2.0
            v1 = vel[i][j][0]
            v2 = vel[i][j][1]
            hue = (v1*v1 + v2*v2)**0.4 # modvsq**0.4
            rgb[i][j] = hue2rgb(hue)
    if 0 == rank:
        # Open the specified files
        velout = open(velfile, "w")
        #velout.write("{0} {1}\n".format(m/scale, n/scale))
        colout = open(colfile, "w")
        #colout.write("{0} {1}\n".format(m, n))
    # NOTE(review): on rank 0 this also Recv's from irank == 0 (itself),
    # and the flat slices rgb[0][0][0:3*m*n] / vel[0][0][0:2*m*n] index a
    # length-3 (resp. 2) innermost axis -- verify the gather actually works.
    for irank in range(0, size):
        if 0 == rank:
            comm.Recv(rgb[0][0][0:3*m*n], source=irank, tag=1, status=MPI.Status())
            comm.Recv(vel[0][0][0:2*m*n], source=irank, tag=1, status=MPI.Status())
            # NOTE(review): this inner loop shadows the outer 'irank', and
            # the writes below reuse 'i'/'j' left over from the velocity
            # loop instead of iterating them here -- almost certainly wrong.
            for irank in range(0, m):
                ix = irank*m+i+1
                for j in range(0, n):
                    iy = j+1
                    colout.write(f'{ix} {iy} {rgb[i][j][0]:d} {rgb[i][j][1]:d} {rgb[i][j][2]:d}\n')
                    #print(((ix-1)%scale, int((scale-1)/2), (iy-1)%scale, int((scale-1)/2)))
                    scale_int = int((scale-1)/2)
                    # only sample one velocity vector per scale x scale tile
                    if ((ix-1)%scale == scale_int) and (iy-1)%scale == scale_int:
                        velout.write(f'{ix} {iy} {vel[i][j][0]} {vel[i][j][1]}\n')
            velout.close()
            colout.close()
        else:
            comm.Send(rgb[0][0][0:3*m*n], dest=0, tag=1)
            comm.Send(vel[0][0][0:2*m*n], dest=0, tag=1)
def writeplotfile(m, n, scale):
    """Write the gnuplot script 'cfd.plt' that renders the colour map and
    the sampled velocity vectors produced by write_data()."""
    print('scalefactor', scale)
    # arrow components scaled to 0.75 * scale and normalised by |v|
    arrow = (f'({scale}*0.75*$3/sqrt($3**2+$4**2))'
             f':({scale}*0.75*$4/sqrt($3**2+$4**2))')
    script = (
        'set size square\nset key off\nunset xtics\nunset ytics\n'
        f'set xrange[{1-scale}:{m+scale}]\nset yrange[{1-scale}:{n+scale}]\n'
        f'plot "colourmap.dat" w rgbimage, "velocity.dat" u 1:2:{arrow}'
        ' with vectors lc rgb "#7F7F7F"'
    )
    with open('cfd.plt', 'w') as plotfile:
        plotfile.write(script)
    print("\nWritten gnuplot script 'cfd.plt'\n")
def hue2rgb(hue):
    """Convert a scalar hue value into an (r, g, b) tuple of ints in 0..255.

    Each channel evaluates the colfunc() ramp at a shifted hue so the
    three channels peak at different magnitudes.
    """
    rgbmax = 255
    red = colfunc(hue - 1.0)
    green = colfunc(hue - 0.5)
    blue = colfunc(hue)
    return int(rgbmax * red), int(rgbmax * green), int(rgbmax * blue)
def colfunc(x):
    """Piecewise colour ramp on |x|: 1.0 inside 0.2, 0.0 beyond 0.5, and a
    smooth quadratic falloff in between."""
    lo = 0.2
    hi = 0.5
    mag = abs(x)
    if mag > hi:
        return 0.0
    if mag < lo:
        return 1.0
    t = (mag - lo) / (hi - lo)
    return 1.0 - t ** 2
############################################################################################################################################
# jacobi.py
def jacobistep(psi, m, n):
    """
    One Jacobi iteration for the streamfunction: each interior point of the
    (m+2) x (n+2) grid becomes the average of its four neighbours.
    Returns a new m x n interior array; psi is not modified.
    """
    # plain-numpy equivalent, kept for reference:
    #return 0.25 * (psi[0:m, 1:n+1]+psi[2:m+2, 1:n+1]+psi[1:m+1, 0:n] + psi[1:m+1, 2:n+2])
    # numexpr evaluates the four-point average in one multi-threaded pass
    return ne.evaluate("0.25 * (a + b + c + d)", {'a':psi[0:m, 1:n+1],'b':psi[2:m+2, 1:n+1],'c':psi[1:m+1, 0:n],'d':psi[1:m+1, 2:n+2]})
def jacobistepvort(zet, psi, m, n, re):
    """
    One Jacobi sweep of the coupled stream-function / vorticity update.

    *psi* (stream function) and *zet* (vorticity) both carry a one-cell
    halo. Returns (psinew, zetnew), each of shape (m, n), for the interior
    only; the inputs are not modified. *re* is the (already rescaled)
    Reynolds number. The commented lines preserve the plain-NumPy form of
    each expression; the numexpr strings below are algebraically identical
    but evaluate in one multi-threaded pass.
    """
    #print(np.sum(zet), np.sum(psi))
    #psinew = 0.25 * (psi[0:m, 1:n+1]+psi[2:m+2, 1:n+1]+psi[1:m+1, 0:n] + psi[1:m+1, 2:n+2] - zet[1:m+1, 1:n+1])
    psinew = ne.evaluate("0.25 * (a + b + c + d - e)", {'a':psi[0:m, 1:n+1],'b':psi[2:m+2, 1:n+1],'c':psi[1:m+1, 0:n],'d':psi[1:m+1, 2:n+2],'e':zet[1:m+1, 1:n+1]})
    #zetnew = - re/16.0 * ((psi[1:m+1, 2:n+2]-psi[1:m+1, 0:n])*(zet[2:m+2, 1:n+1]-zet[0:m, 1:n+1]) - (psi[2:m+2, 1:n+1]-psi[0:m, 1:n+1])*(zet[1:m+1, 2:n+2]-zet[1:m+1, 0:n])) + (0.25*(zet[0:m, 1:n+1]+zet[2:m+2, 1:n+1]+zet[1:m+1, 0:n]+zet[1:m+1, 2:n+2]))
    zetnew = ne.evaluate("- re / 16.0 * ((d - c) * (f - g) - (b - a) * (h - i)) + (0.25 * (f + g + h + i))", {'re':re,'a':psi[0:m, 1:n+1],'b':psi[2:m+2, 1:n+1],'c':psi[1:m+1, 0:n],'d':psi[1:m+1, 2:n+2],'f':zet[2:m+2, 1:n+1],'g':zet[0:m, 1:n+1],'h':zet[1:m+1, 2:n+2],'i':zet[1:m+1, 0:n]})
    return psinew, zetnew
def deltasq(psi_os_zet_temp, oldarr, m, n):
    """Return the sum of squared differences between the new (m, n)
    interior values and the interior of the haloed old array, as a
    plain Python float (so MPI reduction sees a scalar)."""
    diff = psi_os_zet_temp - oldarr[1:m + 1, 1:n + 1]
    return float(np.sum(diff * diff))
##################################################################MAIN#################################################
# cfd_numpy.py MPI4PY MAIN-file
def main(argv):
    """Run the 2D CFD (lid-driven cavity) simulation over MPI.

    argv: command-line arguments minus the program name —
    ``<scalefactor> <iterations> [reynolds]``. When a Reynolds number is
    given the vorticity-coupled solver is used; otherwise the flow is
    treated as irrotational. The grid is decomposed row-wise across the
    MPI ranks; each rank owns an (lm+2, n+2) slab including halos.
    """
    # Test we have the correct number of arguments
    if len(argv) < 2:
        sys.stdout.write("Usage: cfd.py <scalefactor> <iterations> [reynolds]\n")
        sys.exit(1)
    # Get the system parameters from the arguments
    scalefactor = int(argv[0])
    niter = int(argv[1])
    # print interval
    printfreq = 1000
    # Set the minimum size parameters
    bbase = 10
    hbase = 15
    wbase = 5
    mbase = 32
    nbase = 32
    # Set the parameters for boundary conditions
    b = bbase * scalefactor
    h = hbase * scalefactor
    w = wbase * scalefactor
    # Set the dimensions of the array
    m = mbase * scalefactor
    n = nbase * scalefactor
    # checkreynolds
    checkerr = 0
    # //tolerance for convergence. <=0 means do not check
    tolerance = 0
    # parallelisation parameters
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()
    # check command line and add reynolds
    if len(argv) == 3:
        re = float(argv[2])
        irrotational = 0
        if 0 == rank:
            print(f"Reynolds number = {re}")
    else:
        re = -1
        irrotational = 1
        if 0 == rank:
            print("Irrotational flow\n")
    # vorticity array only needed for the rotational solver
    if not irrotational:
        zet = np.zeros((m + 2, n + 2))
    if rank == 0:
        sys.stdout.write("\n2D CFD Simulation\n")
        sys.stdout.write("=================\n")
        sys.stdout.write("Scale factor = {0}\n".format(scalefactor))
        sys.stdout.write("Iterations = {0}\n".format(niter))
    # //calculate local size (rows per rank)
    lm = int(m/size)
    # bnorm: one-element array so Allreduce can write into it in place
    bnorm = np.array([0.0])
    # consistency check: the decomposition requires size to divide m evenly
    if size*lm != m:
        if 0 == rank:
            # BUGFIX: message previously read "dies not divide"
            print(f'Error: {m} does not divide into {size} processes')
        # BUGFIX: Comm has no MPI_Finalize attribute (the old
        # comm.MPI_Finalize() raised AttributeError); finalize the MPI
        # environment properly and stop instead of falling through.
        MPI.Finalize()
        sys.exit(1)
    if 0 == rank:
        print(f'Running CFD on {m}x{n} grid using {size} processes')
        # Write the simulation details
        sys.stdout.write("\nGrid size = {0} x {1}\n".format(m, n))
    # runtime parameters are identical on every rank (parsed from argv),
    # so no broadcast is needed here
    # reynolds number is rescaled with the grid
    re = re / scalefactor
    # //do we stop because of tolerance?
    if tolerance > 0:
        checkerr = 1
    # Define the psi array of dimension [lm+2][n+2] and set it to zero
    psi = np.zeros((lm + 2, n + 2))
    # Set the psi boundary conditions
    boundarypsi(psi, lm, n, b, h, w, comm)
    # compute normalisation factor for error
    localbnorm = 0
    # vectorised instead of a double for-loop
    localbnorm += np.sum(psi * psi)
    # boundary swap of psi
    haloSWAP(psi, lm, n, comm)
    if not irrotational:
        # update zeta BCs that depend on psi
        boundaryzet(zet, psi, lm, n, comm)
        # update normalisation
        localbnorm += np.sum(zet * zet)
        # boundary swap of zet
        haloSWAP(zet, lm, n, comm)
    comm.Allreduce(sendbuf=localbnorm, recvbuf=bnorm, op=MPI.SUM)
    bnorm = np.sqrt(bnorm)
    # Call the Jacobi iterative loop (and calculate timings)
    if 0 == rank:
        sys.stdout.write("\nStarting main Jacobi loop ...\n\n")
    # barrier for accurate timing - not needed for correctness
    comm.Barrier()
    tstart = MPI.Wtime()
    # -------------------
    for iter in range(1, niter + 1):
        # //calculate psi for next iteration
        if irrotational:
            psitmp = jacobistep(psi, lm, n)
        else:
            psitmp, zettmp = jacobistepvort(zet, psi, lm, n, re)
        # //calculate current error if required
        if checkerr or iter == niter:
            localerror = deltasq(psitmp, psi, lm, n)
            if not irrotational:
                localerror += deltasq(zettmp, zet, lm, n)
            # only rank 0 has the "error" variable!
            error = comm.reduce(localerror, op=MPI.SUM)
            if 0 == rank:
                error = np.sqrt(error) / bnorm
        # //copy back but not all!! (interior only; halos stay)
        psi[1:lm+1, 1:n+1] = psitmp
        if not irrotational:
            # //copy back but not all!!
            zet[1:lm+1, 1:n+1] = zettmp
        # do a boundary swap
        haloSWAP(psi, lm, n, comm)
        if not irrotational:
            haloSWAP(zet, lm, n, comm)
            # update zeta BCs that depend on psi
            boundaryzet(zet, psi, lm, n, comm)
        # //quit early if we have reached required tolerance
        if 0 == rank and checkerr and error < tolerance:
            print(f"Converged on iteration {iter}")
            break
        # //print loop information
        if (iter % printfreq == 0) and 0 == rank:
            if not checkerr:
                print(f"Completed iteration {iter}")
            else:
                print(f"Completed iteration {iter}, error = {error}\n")
    # clamp (translated from the C original where the for-loop counter
    # could overrun; in Python iter never exceeds niter, kept for parity)
    if iter > niter:
        iter = niter
    # -------------------
    # barrier for accurate timing - not needed for correctness
    comm.Barrier()
    tend = MPI.Wtime()
    ttot = tend - tstart
    titer = ttot / niter
    # print out some stats
    if 0 == rank:
        print("\n... finished\n")
        print(f"After {iter} iterations, the error is {error}\n")
        print(f"Time for {iter} iterations was {ttot} seconds\n")
        print(f"Each iteration took {titer} seconds\n")
    # Write the output files for subsequent visualisation
    #write_data(m, n, scalefactor, psi, "velocity.dat", "colourmap.dat", comm)
    # generate gnuplot file
    # Finish nicely
    if 0 == rank:
        # writeplotfile(m, n, scalefactor)
        sys.exit(0)
    MPI.Finalize()
##############################################################
# Script entry point: forward the command-line arguments (minus the
# program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
##############################################################
| 31.477958 | 287 | 0.512051 |
import os
import sys
import time
import mpi4py.MPI as MPI
import numpy as np
import numexpr as ne
from copy import deepcopy
os.environ['NUMEXPR_MAX_THREADS'] = '128'
ne.set_num_threads(2)
| true | true |
f7fe0eea8b5f1048c07a7e1fe46e1c3a425d7750 | 599 | py | Python | xjson/plugins/plugin_yaml.py | mikegribov/filedjson | ee9f8408edcf8a72b8ed415237789a602ee6b579 | [
"MIT"
] | null | null | null | xjson/plugins/plugin_yaml.py | mikegribov/filedjson | ee9f8408edcf8a72b8ed415237789a602ee6b579 | [
"MIT"
] | null | null | null | xjson/plugins/plugin_yaml.py | mikegribov/filedjson | ee9f8408edcf8a72b8ed415237789a602ee6b579 | [
"MIT"
] | null | null | null |
from .base_file import BaseFilePlugin
from ..xnodes import create_xnode, XNode, XDict, XFileError
import yaml
class PluginYaml(BaseFilePlugin):
    """File plugin that parses YAML documents into XNode trees."""
    def def_extensions(self) -> set:
        """Return the set of file extensions this plugin handles."""
        return {'yaml'}
    def load(self, content) -> XNode:
        """Parse *content* as YAML and wrap the result in an XNode.

        Blank content maps to an empty XDict; malformed YAML is converted
        into an XFileError node instead of raising.
        """
        if content.strip() == '':
            result = XDict(_file=self.file)
        else:
            try:
                result = create_xnode(None, yaml.safe_load(content), _file=self.file)
            # yaml.YAMLError is the documented base of ParserError,
            # ScannerError and ComposerError; safe_load can raise any of
            # them, and the old catch of ParserError alone let the others
            # escape uncaught.
            except yaml.YAMLError as ex:
                result = XFileError(name=ex, _file=self.file)
        return result
| 27.227273 | 86 | 0.592654 |
from .base_file import BaseFilePlugin
from ..xnodes import create_xnode, XNode, XDict, XFileError
import yaml
class PluginYaml(BaseFilePlugin):
    """File plugin that parses YAML documents into XNode trees."""
    def def_extensions(self) -> set:
        """Return the set of file extensions this plugin handles."""
        return {'yaml'}
    def load(self, content) -> XNode:
        """Parse *content* as YAML and wrap the result in an XNode.

        Blank content maps to an empty XDict; malformed YAML is converted
        into an XFileError node instead of raising.
        """
        if content.strip() == '':
            result = XDict(_file=self.file)
        else:
            try:
                result = create_xnode(None, yaml.safe_load(content), _file=self.file)
            # yaml.YAMLError is the documented base of ParserError,
            # ScannerError and ComposerError; safe_load can raise any of
            # them, and the old catch of ParserError alone let the others
            # escape uncaught.
            except yaml.YAMLError as ex:
                result = XFileError(name=ex, _file=self.file)
        return result
| true | true |
f7fe0ff52b81c937de605d73a5c7587aa5e2b7d3 | 12,549 | py | Python | sublime/Packages/Minify/Minify.py | thrvrs/dotfiles | 362c74187b569b6c962392688b94f458a167722d | [
"MIT"
] | null | null | null | sublime/Packages/Minify/Minify.py | thrvrs/dotfiles | 362c74187b569b6c962392688b94f458a167722d | [
"MIT"
] | null | null | null | sublime/Packages/Minify/Minify.py | thrvrs/dotfiles | 362c74187b569b6c962392688b94f458a167722d | [
"MIT"
] | null | null | null | import sublime, sublime_plugin, re, os, subprocess, platform, ntpath, shlex
PLUGIN_DIR = os.getcwd() if int(sublime.version()) < 3000 else os.path.dirname(__file__)
SUBL_ASYNC = callable(getattr(sublime, 'set_timeout_async', None))
USE_SHELL = sublime.platform() == 'windows'
POPEN_ENV = ({'PATH': ':'.join(['/usr/local/bin', os.environ['PATH']])}) if sublime.platform() == 'osx' and os.path.isdir('/usr/local/bin') else None
if sublime.load_settings('Minify.sublime-settings').get('debug_mode'):
print('Minify: Sublime Platform:' + str(sublime.platform()))
print('Minify: Sublime Version:' + str(sublime.version()))
print('Minify: Python Version:' + str(platform.python_version()))
print('Minify: PLUGIN_DIR:' + str(PLUGIN_DIR))
print('Minify: SUBL_ASYNC:' + str(SUBL_ASYNC))
print('Minify: USE_SHELL:' + str(USE_SHELL))
print('Minify: POPEN_ENV:' + str(POPEN_ENV))
class MinifyUtils():
	"""Mixin with the shared helpers: string fix-ups, shell quoting,
	subprocess execution and settings lookup. Expects ``self.view`` to be
	a Sublime view when get_setting is used."""
	def fixStr(self, s):
		# Python 2 compatibility: encode unicode to utf-8 bytes; a no-op
		# on Python 3 where 'unicode' does not exist as a type name.
		return s.encode('utf8') if (type(s).__name__ == 'unicode') else s
	def quoteChrs(self, s):
		# Escape parentheses for cmd.exe (only relevant with shell=True).
		return s.replace("(", "^^(").replace(")", "^^)") if USE_SHELL else s
	def runProgram(self, cmd, cwd = False):
		"""Run *cmd*; return (returncode, output).

		When the command contains a '>' redirection token the child's
		stdout goes to the shell redirection and stderr is captured as
		the diagnostic output; otherwise stdout+stderr are captured
		together. *cwd* optionally sets the working directory.
		"""
		if '>' in cmd:
			p = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=USE_SHELL, env=POPEN_ENV)
			output = p.communicate()[1]
		else:
			if cwd:
				p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=USE_SHELL, env=POPEN_ENV, cwd=cwd)
			else:
				p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=USE_SHELL, env=POPEN_ENV)
			output = p.communicate()[0]
		return p.returncode, output
	def get_setting(self, key):
		"""Read *key* from the view's project-level 'Minify' settings,
		falling back to the global Minify.sublime-settings file."""
		settings = self.view.settings().get('Minify')
		if settings is None or settings.get(key) is None:
			settings = sublime.load_settings('Minify.sublime-settings')
		return settings.get(key)
# Sublime Text 2 has no async timeout API, so external commands are run
# in a plain worker thread to keep the UI responsive.
if not SUBL_ASYNC:
	import threading
	class RunCmdInOtherThread(MinifyUtils, threading.Thread):
		"""Worker thread that runs one external command and stores its
		return code and combined output for later inspection."""
		def __init__(self, cmd, cwd = False):
			self.cmd = cmd
			self.retCode = 1
			self.output = ''
			self.cwd = cwd;
			threading.Thread.__init__(self)
		def run(self):
			# Temporarily switch the process CWD for the duration of the
			# command (os.chdir is process-wide; acceptable on ST2).
			if not SUBL_ASYNC and self.cwd:
				old_cwd = os.getcwd()
				os.chdir(self.cwd)
			self.retCode, self.output = self.runProgram(self.cmd)
			if not SUBL_ASYNC and self.cwd:
				os.chdir(old_cwd)
class ThreadHandling(MinifyUtils):
	"""Mixin that dispatches external commands (sync on ST3+, threaded on
	ST2) and reports their results back to the user."""
	def handle_result(self, cmd, outfile, retCode, output):
		"""Show an error dialog on failure; on success optionally open
		the generated file (controlled by the 'open_file' setting)."""
		if retCode:
			if output:
				sublime.error_message(' '.join(cmd) + '\r\n\r\n' + output.decode('utf-8'))
		else:
			if self.get_setting('open_file'):
				sublime.active_window().open_file(outfile)
	def handle_thread(self, thread, outfile):
		"""Poll the worker thread every 100 ms until it finishes, then
		forward its result to handle_result."""
		if thread.is_alive():
			sublime.set_timeout(lambda: self.handle_thread(thread, outfile), 100)
		else:
			self.handle_result(thread.cmd, outfile, thread.retCode, thread.output)
	def run_cmd(self, cmd, outfile, cwd=False):
		"""Execute *cmd* producing *outfile*: synchronously when Sublime
		provides an async timeout (we are already off the UI thread), via
		a worker thread otherwise."""
		if self.get_setting('debug_mode'):
			print('Minify: Output file:' + str(outfile))
			print('Minify: Command:' + str(cmd))
		if SUBL_ASYNC:
			retCode, output = self.runProgram(cmd, cwd)
			self.handle_result(cmd, outfile, retCode, output)
		else:
			thread = RunCmdInOtherThread(cmd, cwd)
			thread.start()
			sublime.set_timeout(lambda: self.handle_thread(thread, outfile), 100)
class PluginBase(ThreadHandling):
	"""Common TextCommand behaviour: enablement check and async dispatch
	to the subclass's do_action()."""
	def is_enabled(self):
		# Enabled for css/js/json/html/htm/svg files, or any file with an
		# extension whose syntax is CSS/JavaScript/JSON/HTML.
		filename = self.view.file_name()
		return bool(type(filename).__name__ in ('str', 'unicode') and ((re.search(r'\.(?:css|js|json|html?|svg)$', filename)) or (re.search(r'(\.[^\.]+)$', filename) and re.search(r'/(?:CSS|JavaScript|JSON|HTML)\.tmLanguage$', self.view.settings().get('syntax')))))
	def run(self, edit):
		# Run off the UI thread when the async API exists (ST3+).
		if SUBL_ASYNC:
			sublime.set_timeout_async(lambda: self.do_action(), 0)
		else:
			self.do_action()
class MinifyClass(MinifyUtils):
	"""Builds and runs the external minifier command for the current file.

	The tool is chosen by file extension (falling back to the view's
	syntax): uglifyjs for JS, minjson for JSON, clean-css/uglifycss/YUI
	for CSS, html-minifier for HTML, svgo for SVG. Output goes to
	``<name>.min.<ext>`` next to the input.
	"""
	def minify(self):
		inpfile = self.view.file_name()
		cwd = False
		if type(inpfile).__name__ in ('str', 'unicode') and re.search(r'\.[^\.]+$', inpfile):
			if self.view.is_dirty() and self.get_setting('save_first'):
				self.view.run_command('save')
				# the save just triggered on_post_save, which minifies
				# for us when auto_minify_on_save is enabled
				if self.get_setting('auto_minify_on_save'):
					return
			outfile = re.sub(r'(\.[^\.]+)$', r'.min\1', inpfile, 1)
			syntax = self.view.settings().get('syntax')
			if self.get_setting('debug_mode'):
				print('Minify: Syntax: ' + str(syntax))
			# --- JavaScript: uglifyjs with optional source map ---
			if re.search(r'\.js$', inpfile) or re.search(r'/JavaScript\.tmLanguage$', syntax):
				cmd = self.fixStr(self.get_setting('uglifyjs_command') or 'uglifyjs').split()
				cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile), '-m', '-c'])
				eo = self.get_setting('uglifyjs_options')
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
				if self.get_setting('source_map'):
					directory, rfile = ntpath.split(outfile)
					mapfile = rfile or ntpath.basename(directory)
					content = ''
					if self.get_setting('js_map_content'):
						content = ',content="' + (self.quoteChrs(inpfile + '.map') if os.path.isfile(inpfile + '.map') else 'inline') + '"'
					cmd.extend(['--source-map', "url='" + self.quoteChrs(mapfile) + ".map'" + content + ",root='',base='" + self.quoteChrs(directory) + "'"])
				if self.get_setting('keep_comments'):
					cmd.extend(['--comments'])
					eo = self.get_setting('comments_to_keep')
					if type(eo).__name__ in ('str', 'unicode'):
						cmd.extend([eo])
			# --- JSON: minjson ---
			elif re.search(r'\.json$', inpfile) or re.search(r'/JSON\.tmLanguage$', syntax):
				cmd = self.fixStr(self.get_setting('minjson_command') or 'minjson').split()
				cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
			# --- CSS: uglifycss / YUI compressor / clean-css (default) ---
			elif re.search(r'\.css$', inpfile) or re.search(r'/CSS\.tmLanguage$', syntax):
				minifier = self.get_setting('cssminifier') or 'clean-css'
				if minifier == 'uglifycss':
					cmd = self.fixStr(self.get_setting('uglifycss_command') or 'uglifycss').split()
					eo = self.get_setting('uglifycss_options')
					if type(eo).__name__ in ('str', 'unicode'):
						cmd.extend(self.fixStr(eo).split())
					# uglifycss writes to stdout; '>' makes runProgram
					# capture stderr instead (see MinifyUtils.runProgram)
					cmd.extend([self.quoteChrs(inpfile), '>', self.quoteChrs(outfile)])
				elif minifier == 'yui':
					cmd = self.fixStr(self.get_setting('java_command') or 'java').split()
					yui_compressor = self.get_setting('yui_compressor') or 'yuicompressor-2.4.7.jar'
					cmd.extend(['-jar', PLUGIN_DIR + '/bin/' + str(yui_compressor), self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
					eo = self.get_setting('yui_charset')
					if type(eo).__name__ in ('str', 'unicode'):
						cmd.extend(['--charset', eo])
					eo = self.get_setting('yui_line_break')
					if type(eo).__name__ in ('int', 'str', 'unicode'):
						cmd.extend(['--line-break', str(eo)])
				else:
					cmd = self.fixStr(self.get_setting('cleancss_command') or 'cleancss').split()
					eo = self.get_setting('cleancss_options') or '-O2 --skip-rebase'
					if type(eo).__name__ in ('str', 'unicode'):
						cmd.extend(self.fixStr(eo).split())
					if self.get_setting('css_source_map'):
						cmd.extend(['--source-map'])
						# clean-css writes the map relative to the CWD
						cwd = os.path.dirname(outfile)
					cmd.extend(['-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
			# --- HTML: html-minifier ---
			elif re.search(r'\.html?$', inpfile) or re.search(r'/HTML\.tmLanguage$', syntax):
				cmd = self.fixStr(self.get_setting('html-minifier_command') or 'html-minifier').split()
				eo = self.get_setting('html-minifier_options') or '--collapse-boolean-attributes --collapse-whitespace --html5 --minify-css --minify-js --preserve-line-breaks --process-conditional-comments --remove-comments --remove-empty-attributes --remove-redundant-attributes --remove-script-type-attributes --remove-style-link-type-attributes'
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
				cmd.extend(['-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
			# --- SVG: svgo ---
			elif re.search(r'\.svg$', inpfile):
				cmd = self.fixStr(self.get_setting('svgo_command') or 'svgo').split()
				eo = self.get_setting('svgo_min_options')
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
				cmd.extend(['-i', self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
			else:
				cmd = False
			if cmd:
				print('Minify: Minifying file:' + str(inpfile))
				self.run_cmd(cmd, outfile, cwd)
class BeautifyClass(MinifyUtils):
	"""Builds and runs the external beautifier command for the current
	file: uglifyjs for JS, minjson for JSON, js-beautify for CSS/HTML,
	svgo for SVG. Output goes to ``<name>.beautified.<ext>`` (or
	``.pretty.<ext>`` for HTML/SVG)."""
	def beautify(self):
		inpfile = self.view.file_name()
		if type(inpfile).__name__ in ('str', 'unicode') and re.search(r'\.[^\.]+$', inpfile):
			if self.view.is_dirty() and self.get_setting('save_first'):
				self.view.run_command('save')
			outfile = re.sub(r'(?:\.min)?(\.[^\.]+)$', r'.beautified\1', inpfile, 1)
			syntax = self.view.settings().get('syntax')
			# BUGFIX: initialise cmd so an unmatched extension/syntax no
			# longer raises UnboundLocalError at the final `if cmd:`
			# (MinifyClass.minify already has the equivalent else branch).
			cmd = False
			if re.search(r'\.js$', inpfile) or re.search(r'/JavaScript\.tmLanguage$', syntax):
				cmd = self.fixStr(self.get_setting('uglifyjs_command') or 'uglifyjs').split()
				cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile), '--comments', 'all', '-b'])
				eo = self.get_setting('uglifyjs_pretty_options')
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
			elif re.search(r'\.json$', inpfile) or re.search(r'/JSON\.tmLanguage$', syntax):
				cmd = self.fixStr(self.get_setting('minjson_command') or 'minjson').split()
				cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile), '-b'])
			elif re.search(r'\.css$', inpfile) or re.search(r'/CSS\.tmLanguage$', syntax):
				cmd = self.fixStr(self.get_setting('js-beautify_command') or 'js-beautify').split()
				eo = self.get_setting('js-beautify_options')
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
				cmd.extend(['--css', '-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
			elif re.search(r'\.html?$', inpfile) or re.search(r'/HTML\.tmLanguage$', syntax):
				outfile = re.sub(r'(?:\.min)?(\.[^\.]+)$', r'.pretty\1', inpfile, 1)
				cmd = self.fixStr(self.get_setting('js-beautify_command') or 'js-beautify').split()
				eo = self.get_setting('js-beautify_html_options')
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
				cmd.extend(['--html', '-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
			elif re.search(r'\.svg$', inpfile):
				outfile = re.sub(r'(?:\.min)?(\.[^\.]+)$', r'.pretty\1', inpfile, 1)
				cmd = self.fixStr(self.get_setting('svgo_command') or 'svgo').split()
				eo = self.get_setting('svgo_pretty_options')
				if type(eo).__name__ in ('str', 'unicode'):
					cmd.extend(self.fixStr(eo).split())
				cmd.extend(['--pretty', '-i', self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
			if cmd:
				print('Minify: Beautifying file:' + str(inpfile))
				self.run_cmd(cmd, outfile)
class MinifyCommand(PluginBase, MinifyClass, sublime_plugin.TextCommand):
	"""Sublime text command 'minify': minifies the current file."""
	def do_action(self):
		self.minify()
class BeautifyCommand(PluginBase, BeautifyClass, sublime_plugin.TextCommand):
	"""Sublime text command 'beautify': pretty-prints the current file."""
	def do_action(self):
		self.beautify()
class RunAfterSave(ThreadHandling, MinifyClass, sublime_plugin.EventListener):
	"""Event listener that minifies a file right after it is saved, when
	the 'auto_minify_on_save' setting is on and the file's type is listed
	in 'allowed_file_types'. Files already ending in .min.<ext> are
	skipped."""
	def on_post_save(self, view):
		self.view = view
		if self.get_setting('auto_minify_on_save'):
			filename = self.view.file_name()
			syntax = self.view.settings().get('syntax')
			if type(filename).__name__ in ('str', 'unicode') and ((re.search(r'\.(?:css|js|json|html?|svg)$', filename)) or (re.search(r'(\.[^\.]+)$', filename) and re.search(r'/(?:CSS|JavaScript|JSON|HTML)\.tmLanguage$', syntax))):
				# Build alternation patterns from the enabled file types.
				searchFName = ''
				searchSyntax = ''
				if 'css' in self.get_setting('allowed_file_types'):
					searchFName += 'css|'
					searchSyntax += 'CSS|'
				if 'js' in self.get_setting('allowed_file_types'):
					searchFName += 'js|'
					searchSyntax += 'JavaScript|'
				if 'json' in self.get_setting('allowed_file_types'):
					searchFName += 'json|'
					searchSyntax += 'JSON|'
				if 'html' in self.get_setting('allowed_file_types'):
					searchFName += 'html?|'
					searchSyntax += 'HTML|'
				if 'svg' in self.get_setting('allowed_file_types'):
					searchFName += 'svg|'
				searchFNameRegEx = r'\.(?:' + searchFName.rstrip('|') + ')$'
				# BUGFIX: the trailing fragment is now a raw string; the
				# old non-raw '\.' was an invalid escape sequence
				# (DeprecationWarning, a SyntaxWarning/Error in newer
				# Pythons). The resulting pattern is unchanged.
				searchSyntaxRegEx = r'/(?:' + searchSyntax.rstrip('|') + r')\.tmLanguage$'
				if re.search(searchFNameRegEx, filename) or (re.search(r'(\.[^\.]+)$', filename) and re.search(searchSyntaxRegEx, syntax)):
					if re.search(r'\.min\.[^\.]+$', filename):
						if self.get_setting('debug_mode'):
							print('Minify: Skipping file ' + filename + ' - already minified')
					else:
						if SUBL_ASYNC:
							sublime.set_timeout_async(lambda: self.minify(), 0)
						else:
							self.minify()
				else:
					if self.get_setting('debug_mode'):
						print('Minify: Skipping file ' + filename + ' - not in allowed_file_types')
| 47.534091 | 336 | 0.666587 | import sublime, sublime_plugin, re, os, subprocess, platform, ntpath, shlex
PLUGIN_DIR = os.getcwd() if int(sublime.version()) < 3000 else os.path.dirname(__file__)
SUBL_ASYNC = callable(getattr(sublime, 'set_timeout_async', None))
USE_SHELL = sublime.platform() == 'windows'
POPEN_ENV = ({'PATH': ':'.join(['/usr/local/bin', os.environ['PATH']])}) if sublime.platform() == 'osx' and os.path.isdir('/usr/local/bin') else None
if sublime.load_settings('Minify.sublime-settings').get('debug_mode'):
print('Minify: Sublime Platform:' + str(sublime.platform()))
print('Minify: Sublime Version:' + str(sublime.version()))
print('Minify: Python Version:' + str(platform.python_version()))
print('Minify: PLUGIN_DIR:' + str(PLUGIN_DIR))
print('Minify: SUBL_ASYNC:' + str(SUBL_ASYNC))
print('Minify: USE_SHELL:' + str(USE_SHELL))
print('Minify: POPEN_ENV:' + str(POPEN_ENV))
class MinifyUtils():
def fixStr(self, s):
return s.encode('utf8') if (type(s).__name__ == 'unicode') else s
def quoteChrs(self, s):
return s.replace("(", "^^(").replace(")", "^^)") if USE_SHELL else s
def runProgram(self, cmd, cwd = False):
if '>' in cmd:
p = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=USE_SHELL, env=POPEN_ENV)
output = p.communicate()[1]
else:
if cwd:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=USE_SHELL, env=POPEN_ENV, cwd=cwd)
else:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=USE_SHELL, env=POPEN_ENV)
output = p.communicate()[0]
return p.returncode, output
def get_setting(self, key):
settings = self.view.settings().get('Minify')
if settings is None or settings.get(key) is None:
settings = sublime.load_settings('Minify.sublime-settings')
return settings.get(key)
if not SUBL_ASYNC:
import threading
class RunCmdInOtherThread(MinifyUtils, threading.Thread):
def __init__(self, cmd, cwd = False):
self.cmd = cmd
self.retCode = 1
self.output = ''
self.cwd = cwd;
threading.Thread.__init__(self)
def run(self):
if not SUBL_ASYNC and self.cwd:
old_cwd = os.getcwd()
os.chdir(self.cwd)
self.retCode, self.output = self.runProgram(self.cmd)
if not SUBL_ASYNC and self.cwd:
os.chdir(old_cwd)
class ThreadHandling(MinifyUtils):
def handle_result(self, cmd, outfile, retCode, output):
if retCode:
if output:
sublime.error_message(' '.join(cmd) + '\r\n\r\n' + output.decode('utf-8'))
else:
if self.get_setting('open_file'):
sublime.active_window().open_file(outfile)
def handle_thread(self, thread, outfile):
if thread.is_alive():
sublime.set_timeout(lambda: self.handle_thread(thread, outfile), 100)
else:
self.handle_result(thread.cmd, outfile, thread.retCode, thread.output)
def run_cmd(self, cmd, outfile, cwd=False):
if self.get_setting('debug_mode'):
print('Minify: Output file:' + str(outfile))
print('Minify: Command:' + str(cmd))
if SUBL_ASYNC:
retCode, output = self.runProgram(cmd, cwd)
self.handle_result(cmd, outfile, retCode, output)
else:
thread = RunCmdInOtherThread(cmd, cwd)
thread.start()
sublime.set_timeout(lambda: self.handle_thread(thread, outfile), 100)
class PluginBase(ThreadHandling):
def is_enabled(self):
filename = self.view.file_name()
return bool(type(filename).__name__ in ('str', 'unicode') and ((re.search(r'\.(?:css|js|json|html?|svg)$', filename)) or (re.search(r'(\.[^\.]+)$', filename) and re.search(r'/(?:CSS|JavaScript|JSON|HTML)\.tmLanguage$', self.view.settings().get('syntax')))))
def run(self, edit):
if SUBL_ASYNC:
sublime.set_timeout_async(lambda: self.do_action(), 0)
else:
self.do_action()
class MinifyClass(MinifyUtils):
def minify(self):
inpfile = self.view.file_name()
cwd = False
if type(inpfile).__name__ in ('str', 'unicode') and re.search(r'\.[^\.]+$', inpfile):
if self.view.is_dirty() and self.get_setting('save_first'):
self.view.run_command('save')
if self.get_setting('auto_minify_on_save'):
return
outfile = re.sub(r'(\.[^\.]+)$', r'.min\1', inpfile, 1)
syntax = self.view.settings().get('syntax')
if self.get_setting('debug_mode'):
print('Minify: Syntax: ' + str(syntax))
if re.search(r'\.js$', inpfile) or re.search(r'/JavaScript\.tmLanguage$', syntax):
cmd = self.fixStr(self.get_setting('uglifyjs_command') or 'uglifyjs').split()
cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile), '-m', '-c'])
eo = self.get_setting('uglifyjs_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
if self.get_setting('source_map'):
directory, rfile = ntpath.split(outfile)
mapfile = rfile or ntpath.basename(directory)
content = ''
if self.get_setting('js_map_content'):
content = ',content="' + (self.quoteChrs(inpfile + '.map') if os.path.isfile(inpfile + '.map') else 'inline') + '"'
cmd.extend(['--source-map', "url='" + self.quoteChrs(mapfile) + ".map'" + content + ",root='',base='" + self.quoteChrs(directory) + "'"])
if self.get_setting('keep_comments'):
cmd.extend(['--comments'])
eo = self.get_setting('comments_to_keep')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend([eo])
elif re.search(r'\.json$', inpfile) or re.search(r'/JSON\.tmLanguage$', syntax):
cmd = self.fixStr(self.get_setting('minjson_command') or 'minjson').split()
cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
elif re.search(r'\.css$', inpfile) or re.search(r'/CSS\.tmLanguage$', syntax):
minifier = self.get_setting('cssminifier') or 'clean-css'
if minifier == 'uglifycss':
cmd = self.fixStr(self.get_setting('uglifycss_command') or 'uglifycss').split()
eo = self.get_setting('uglifycss_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
cmd.extend([self.quoteChrs(inpfile), '>', self.quoteChrs(outfile)])
elif minifier == 'yui':
cmd = self.fixStr(self.get_setting('java_command') or 'java').split()
yui_compressor = self.get_setting('yui_compressor') or 'yuicompressor-2.4.7.jar'
cmd.extend(['-jar', PLUGIN_DIR + '/bin/' + str(yui_compressor), self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
eo = self.get_setting('yui_charset')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(['--charset', eo])
eo = self.get_setting('yui_line_break')
if type(eo).__name__ in ('int', 'str', 'unicode'):
cmd.extend(['--line-break', str(eo)])
else:
cmd = self.fixStr(self.get_setting('cleancss_command') or 'cleancss').split()
eo = self.get_setting('cleancss_options') or '-O2 --skip-rebase'
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
if self.get_setting('css_source_map'):
cmd.extend(['--source-map'])
cwd = os.path.dirname(outfile)
cmd.extend(['-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
elif re.search(r'\.html?$', inpfile) or re.search(r'/HTML\.tmLanguage$', syntax):
cmd = self.fixStr(self.get_setting('html-minifier_command') or 'html-minifier').split()
eo = self.get_setting('html-minifier_options') or '--collapse-boolean-attributes --collapse-whitespace --html5 --minify-css --minify-js --preserve-line-breaks --process-conditional-comments --remove-comments --remove-empty-attributes --remove-redundant-attributes --remove-script-type-attributes --remove-style-link-type-attributes'
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
cmd.extend(['-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
elif re.search(r'\.svg$', inpfile):
cmd = self.fixStr(self.get_setting('svgo_command') or 'svgo').split()
eo = self.get_setting('svgo_min_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
cmd.extend(['-i', self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
else:
cmd = False
if cmd:
print('Minify: Minifying file:' + str(inpfile))
self.run_cmd(cmd, outfile, cwd)
class BeautifyClass(MinifyUtils):
def beautify(self):
inpfile = self.view.file_name()
if type(inpfile).__name__ in ('str', 'unicode') and re.search(r'\.[^\.]+$', inpfile):
if self.view.is_dirty() and self.get_setting('save_first'):
self.view.run_command('save')
outfile = re.sub(r'(?:\.min)?(\.[^\.]+)$', r'.beautified\1', inpfile, 1)
syntax = self.view.settings().get('syntax')
if re.search(r'\.js$', inpfile) or re.search(r'/JavaScript\.tmLanguage$', syntax):
cmd = self.fixStr(self.get_setting('uglifyjs_command') or 'uglifyjs').split()
cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile), '--comments', 'all', '-b'])
eo = self.get_setting('uglifyjs_pretty_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
elif re.search(r'\.json$', inpfile) or re.search(r'/JSON\.tmLanguage$', syntax):
cmd = self.fixStr(self.get_setting('minjson_command') or 'minjson').split()
cmd.extend([self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile), '-b'])
elif re.search(r'\.css$', inpfile) or re.search(r'/CSS\.tmLanguage$', syntax):
cmd = self.fixStr(self.get_setting('js-beautify_command') or 'js-beautify').split()
eo = self.get_setting('js-beautify_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
cmd.extend(['--css', '-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
elif re.search(r'\.html?$', inpfile) or re.search(r'/HTML\.tmLanguage$', syntax):
outfile = re.sub(r'(?:\.min)?(\.[^\.]+)$', r'.pretty\1', inpfile, 1)
cmd = self.fixStr(self.get_setting('js-beautify_command') or 'js-beautify').split()
eo = self.get_setting('js-beautify_html_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
cmd.extend(['--html', '-o', self.quoteChrs(outfile), self.quoteChrs(inpfile)])
elif re.search(r'\.svg$', inpfile):
outfile = re.sub(r'(?:\.min)?(\.[^\.]+)$', r'.pretty\1', inpfile, 1)
cmd = self.fixStr(self.get_setting('svgo_command') or 'svgo').split()
eo = self.get_setting('svgo_pretty_options')
if type(eo).__name__ in ('str', 'unicode'):
cmd.extend(self.fixStr(eo).split())
cmd.extend(['--pretty', '-i', self.quoteChrs(inpfile), '-o', self.quoteChrs(outfile)])
if cmd:
print('Minify: Beautifying file:' + str(inpfile))
self.run_cmd(cmd, outfile)
class MinifyCommand(PluginBase, MinifyClass, sublime_plugin.TextCommand):
def do_action(self):
self.minify()
class BeautifyCommand(PluginBase, BeautifyClass, sublime_plugin.TextCommand):
def do_action(self):
self.beautify()
class RunAfterSave(ThreadHandling, MinifyClass, sublime_plugin.EventListener):
def on_post_save(self, view):
self.view = view
if self.get_setting('auto_minify_on_save'):
filename = self.view.file_name()
syntax = self.view.settings().get('syntax')
if type(filename).__name__ in ('str', 'unicode') and ((re.search(r'\.(?:css|js|json|html?|svg)$', filename)) or (re.search(r'(\.[^\.]+)$', filename) and re.search(r'/(?:CSS|JavaScript|JSON|HTML)\.tmLanguage$', syntax))):
searchFName = ''
searchSyntax = ''
if 'css' in self.get_setting('allowed_file_types'):
searchFName += 'css|'
searchSyntax += 'CSS|'
if 'js' in self.get_setting('allowed_file_types'):
searchFName += 'js|'
searchSyntax += 'JavaScript|'
if 'json' in self.get_setting('allowed_file_types'):
searchFName += 'json|'
searchSyntax += 'JSON|'
if 'html' in self.get_setting('allowed_file_types'):
searchFName += 'html?|'
searchSyntax += 'HTML|'
if 'svg' in self.get_setting('allowed_file_types'):
searchFName += 'svg|'
searchFNameRegEx = r'\.(?:' + searchFName.rstrip('|') + ')$'
searchSyntaxRegEx = r'/(?:' + searchSyntax.rstrip('|') + ')\.tmLanguage$'
if re.search(searchFNameRegEx, filename) or (re.search(r'(\.[^\.]+)$', filename) and re.search(searchSyntaxRegEx, syntax)):
if re.search(r'\.min\.[^\.]+$', filename):
if self.get_setting('debug_mode'):
print('Minify: Skipping file ' + filename + ' - already minified')
else:
if SUBL_ASYNC:
sublime.set_timeout_async(lambda: self.minify(), 0)
else:
self.minify()
else:
if self.get_setting('debug_mode'):
print('Minify: Skipping file ' + filename + ' - not in allowed_file_types')
| true | true |
f7fe10d850678e29dc62fae9992a5d8940eba91a | 2,046 | py | Python | examples/demo2.py | rmaiko/pyvsim | 18d51d8fc3678ffcb08fd0939dc72c1a8834327d | [
"Apache-2.0"
] | null | null | null | examples/demo2.py | rmaiko/pyvsim | 18d51d8fc3678ffcb08fd0939dc72c1a8834327d | [
"Apache-2.0"
] | null | null | null | examples/demo2.py | rmaiko/pyvsim | 18d51d8fc3678ffcb08fd0939dc72c1a8834327d | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
"""
PyVSim part2.1
Copyright 2013 Ricardo Entz
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
if __name__ == '__main__':
    import sys
    sys.path.append("../")
    import numpy as np
    from pyvsim import *
    """
    This demo shows a simple render of a famous image, but with
    physically correct angles
    """
    # Triangular glass prism (a thin extruded triangle); the 1e-6 x-offset
    # keeps the degenerate fourth base point from coinciding exactly.
    vol = Primitives.Volume()
    vol.points = np.array([[0 ,0,0],
                           [1 ,0,0],
                           [0.5 ,0.866,0],
                           [1e-6,0,0],
                           [0 ,0,0.1],
                           [1 ,0,0.1],
                           [0.5 ,0.866,0.1],
                           [1e-6,0,0.1]])
    vol.surfaceProperty = vol.TRANSPARENT
    # Sellmeier dispersion coefficients for the prism glass (B, C pairs).
    sellmeierCoeffs = np.array([[1.03961212, 0.00600069867],
                                [0.23179234, 0.02001791440],
                                [70.01046945, 103.560653000]])
    vol.material = Library.Glass(sellmeierCoeffs)
    vol.material.name = "The dark side of the moon glass"
    # Bundle of n parallel rays entering the prism, one per wavelength.
    r = Primitives.RayBundle()
    n = 200
    v = Utils.normalize(np.array([0.5,0.17,0]))
    p = np.array([-0.5,0.1,0.05])
    v = np.tile(v,(n,1))
    w = np.linspace(380e-9, 780e-9, n) # all the visible spectrum
    r.append(v, p, w)
    a = Primitives.Assembly()
    a.append(vol)
    a.append(r)
    r.maximumRayTrace = 2
    r.trace()
    # Round-trip the assembly through JSON serialisation, then plot.
    System.save(obj = a, filename = "./test.dat", mode = "json")
    dec = System.load(filename = "./test.dat")
System.plot(dec,displayAxes=False) | 32.47619 | 72 | 0.56696 |
if __name__ == '__main__':
import sys
sys.path.append("../")
import numpy as np
from pyvsim import *
vol = Primitives.Volume()
vol.points = np.array([[0 ,0,0],
[1 ,0,0],
[0.5 ,0.866,0],
[1e-6,0,0],
[0 ,0,0.1],
[1 ,0,0.1],
[0.5 ,0.866,0.1],
[1e-6,0,0.1]])
vol.surfaceProperty = vol.TRANSPARENT
sellmeierCoeffs = np.array([[1.03961212, 0.00600069867],
[0.23179234, 0.02001791440],
[70.01046945, 103.560653000]])
vol.material = Library.Glass(sellmeierCoeffs)
vol.material.name = "The dark side of the moon glass"
r = Primitives.RayBundle()
n = 200
v = Utils.normalize(np.array([0.5,0.17,0]))
p = np.array([-0.5,0.1,0.05])
v = np.tile(v,(n,1))
w = np.linspace(380e-9, 780e-9, n)
r.append(v, p, w)
a = Primitives.Assembly()
a.append(vol)
a.append(r)
r.maximumRayTrace = 2
r.trace()
System.save(obj = a, filename = "./test.dat", mode = "json")
dec = System.load(filename = "./test.dat")
System.plot(dec,displayAxes=False) | true | true |
f7fe120f3a4d41a00ea89f6c4175ee40b1ec2cfe | 1,346 | py | Python | files_to_dataframe/ftd/manipulators/collection.py | LilianBoulard/utils | 84acd0f24afc976a92aa422bfd8ec5c725800b98 | [
"MIT"
] | 2 | 2020-12-15T11:54:58.000Z | 2021-01-21T17:34:06.000Z | files_to_dataframe/ftd/manipulators/collection.py | Phaide/utils | 84acd0f24afc976a92aa422bfd8ec5c725800b98 | [
"MIT"
] | null | null | null | files_to_dataframe/ftd/manipulators/collection.py | Phaide/utils | 84acd0f24afc976a92aa422bfd8ec5c725800b98 | [
"MIT"
] | null | null | null | from .base import BaseManipulator
from . import ByUserManipulator, ByDateManipulator, ByExtensionManipulator, ByDirectoryManipulator, BySizeManipulator
class ManipulatorCollection:
    """
    Instantiates one of every manipulator type and fans method calls
    out to all of them.

    All keyword arguments are forwarded verbatim to each manipulator's
    constructor. ``parent`` is reserved (it is always set to this
    collection) and must not be supplied by the caller.
    """

    def __init__(self, **kwargs):
        # ``parent`` is injected below; a caller-supplied value would be
        # silently shadowed, so reject it up front.
        if 'parent' in kwargs:
            raise RuntimeError('Got unexpected argument "parent" in keyword arguments')

        self.user_manipulator = ByUserManipulator(parent=self, **kwargs)
        self.date_manipulator = ByDateManipulator(parent=self, **kwargs)
        self.ext_manipulator = ByExtensionManipulator(parent=self, **kwargs)
        self.dir_manipulator = ByDirectoryManipulator(parent=self, **kwargs)
        self.size_manipulator = BySizeManipulator(parent=self, **kwargs)

        self.all_manipulators = [
            self.user_manipulator,
            self.date_manipulator,
            self.ext_manipulator,
            self.dir_manipulator,
            self.size_manipulator
        ]

    def call_method(self, method_name: str, **kwargs):
        """
        Call ``method_name(**kwargs)`` on every manipulator.

        The name must be a *callable* defined on ``BaseManipulator``;
        this guards against typos dispatching to unrelated attributes
        (the previous ``hasattr`` check accepted any attribute name,
        callable or not).

        :raises RuntimeError: if ``BaseManipulator`` has no such method.
        """
        if callable(getattr(BaseManipulator, method_name, None)):
            for manipulator in self.all_manipulators:
                getattr(manipulator, method_name)(**kwargs)
        else:
            raise RuntimeError(f'No such function of manipulator: {method_name}')
| 36.378378 | 117 | 0.673848 | from .base import BaseManipulator
from . import ByUserManipulator, ByDateManipulator, ByExtensionManipulator, ByDirectoryManipulator, BySizeManipulator
class ManipulatorCollection:
def __init__(self, **kwargs):
if 'parent' in kwargs.keys():
raise RuntimeError('Got unexpected argument "parent" in keyword arguments')
self.user_manipulator = ByUserManipulator(parent=self, **kwargs)
self.date_manipulator = ByDateManipulator(parent=self, **kwargs)
self.ext_manipulator = ByExtensionManipulator(parent=self, **kwargs)
self.dir_manipulator = ByDirectoryManipulator(parent=self, **kwargs)
self.size_manipulator = BySizeManipulator(parent=self, **kwargs)
self.all_manipulators = [
self.user_manipulator,
self.date_manipulator,
self.ext_manipulator,
self.dir_manipulator,
self.size_manipulator
]
def call_method(self, method_name: str, **kwargs):
if hasattr(BaseManipulator, method_name):
for man in self.all_manipulators:
getattr(man, method_name)(**kwargs)
else:
raise RuntimeError(f'No such function of manipulator: {method_name}')
| true | true |
f7fe127a3088f5e9dfdd5f7eaf83eb9a34bf72e0 | 1,190 | py | Python | sts/routes.py | Cray-HPE/cray-sts | a731a54f58ccb4b57f829253e96afd0f75ca2a01 | [
"MIT"
] | null | null | null | sts/routes.py | Cray-HPE/cray-sts | a731a54f58ccb4b57f829253e96afd0f75ca2a01 | [
"MIT"
] | 2 | 2021-11-30T16:20:34.000Z | 2021-12-01T01:23:02.000Z | sts/routes.py | Cray-HPE/cray-sts | a731a54f58ccb4b57f829253e96afd0f75ca2a01 | [
"MIT"
] | null | null | null | # Copyright 2019, Cray Inc. All rights reserved.
""" These are the routes that are mapped to from connexion """
import uuid
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
from flask import current_app as app
from sts import client as c
def put_token():
    """PUT /token - Assume the configured STS role and return temporary
    credentials (plus the external endpoint URL) in the response body.

    Returns ``({"Credentials": ...}, 201)`` on success, or
    ``("Error", 500)`` when the client cannot be built or STS rejects
    the request.
    """
    # pylint: disable=broad-except, invalid-name
    try:
        client = c.get_sts_client()
    except Exception as e:
        app.logger.error(e)
        return "Error", 500
    conf = app.config['RADOS_CONFIG']
    response = client.assume_role(
        RoleArn=conf.get('arn'),
        RoleSessionName=str(uuid.uuid4()),  # random session name keeps concurrent requests distinct
        DurationSeconds=3600  # credential lifetime: 3600 s = 1 hour (earlier "12 hours" note was wrong)
    )
    # Any non-200 from STS is surfaced as a generic 500 to the caller.
    if response.get('ResponseMetadata', {}).get('HTTPStatusCode', -1) != 200:
        app.logger.error(response.get('ResponseMetadata'))
        return 'Error', 500
    creds = response['Credentials']
    # Point clients at the externally reachable endpoint rather than an
    # in-cluster address.
    creds['EndpointURL'] = conf.get('ext_endpoint_url')
    return {"Credentials": creds}, 201
def get_healthz():
    """GET /healthz - liveness probe; always reports a healthy status."""
    status_body = {"Status": "ok"}
    return status_body, 200
| 29.75 | 92 | 0.670588 |
import uuid
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
from flask import current_app as app
from sts import client as c
def put_token():
try:
client = c.get_sts_client()
except Exception as e:
app.logger.error(e)
return "Error", 500
conf = app.config['RADOS_CONFIG']
response = client.assume_role(
RoleArn=conf.get('arn'),
RoleSessionName=str(uuid.uuid4()),
DurationSeconds=3600
)
if response.get('ResponseMetadata', {}).get('HTTPStatusCode', -1) != 200:
app.logger.error(response.get('ResponseMetadata'))
return 'Error', 500
creds = response['Credentials']
creds['EndpointURL'] = conf.get('ext_endpoint_url')
return {"Credentials": creds}, 201
def get_healthz():
return {"Status": "ok"}, 200
| true | true |
f7fe144a6b1475ca33c443395eda241d9632d7db | 11,015 | py | Python | django/entity/models.py | cmu-lib/authority | 9b8d5f2f0b6b5ae50ca1de4f85fde5a3aa003167 | [
"MIT"
] | null | null | null | django/entity/models.py | cmu-lib/authority | 9b8d5f2f0b6b5ae50ca1de4f85fde5a3aa003167 | [
"MIT"
] | null | null | null | django/entity/models.py | cmu-lib/authority | 9b8d5f2f0b6b5ae50ca1de4f85fde5a3aa003167 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.postgres.fields import ArrayField
from authority import mixins, namespaces
from authority.models import CloseMatch, Authority
from rdflib import Graph, namespace
from rdflib.term import URIRef
from edtf import parse_edtf, struct_time_to_date
from collections import namedtuple
"""
Abstract models used across the rest of the application
"""
class Entity(mixins.trackedModel):
    """Base class for all authority-controlled entities (people,
    corporate bodies, concepts).

    ``pref_label`` holds the preferred display string; alternate names
    live in the related ``Name`` model (reverse accessor ``alt_labels``).
    """

    pref_label = models.CharField(
        default="", blank=True, db_index=True, max_length=5000
    )

    @property
    def viaf_match(self):
        """Identifier of this entity's VIAF close match, or None."""
        try:
            return CloseMatch.objects.get(
                authority__namespace=namespaces.VIAF, entity=self
            ).identifier
        except Exception:
            # Missing (or ambiguous) VIAF match; a bare ``except:`` here
            # previously also swallowed KeyboardInterrupt/SystemExit.
            return None

    @property
    def lcnaf_match(self):
        """Identifier of this entity's LCNAF close match, or None."""
        try:
            return CloseMatch.objects.get(
                authority__namespace=namespaces.LOC, entity=self
            ).identifier
        except Exception:
            # Missing (or ambiguous) LCNAF match.
            return None

    def __str__(self):
        return self.pref_label

    class Meta:
        verbose_name_plural = "entities"
class Name(mixins.labeledModel, mixins.trackedModel):
    """A single name (label) applied to an Entity by an authority.

    The label text itself comes from ``mixins.labeledModel`` —
    presumably a ``label`` field; verify against that mixin.
    Uniqueness spans (label, name_of, language, preferred, authority),
    so the same spelling may coexist across authorities, languages,
    and preferred/alternate status.
    """

    # Entity this name refers to.
    name_of = models.ForeignKey(
        "Entity", on_delete=models.CASCADE, related_name="alt_labels"
    )
    # Authority that uses this name; nullable because some names have no
    # known source authority.
    authority = models.ForeignKey(
        "authority.Authority",
        null=True,
        blank=True,
        on_delete=models.CASCADE,
        help_text="Authority that uses this name",
        related_name="names_used",
    )
    # Language tag of the label, "" when unknown (format not enforced here).
    language = models.CharField(default="", blank=True, max_length=50, db_index=True)
    preferred = models.BooleanField(
        default=False,
        db_index=True,
        help_text="Is this name considered 'preferred' by the authority using it?",
    )

    class Meta:
        unique_together = ("label", "name_of", "language", "preferred", "authority")
class Person(Entity):
    """An individual person.

    Adds EDTF-encoded birth/death dates (with derived earliest/latest
    concrete dates) plus helpers that populate names and dates from
    LCNAF and VIAF RDF graphs.
    """

    birth_edtf = models.CharField(
        default="",
        blank=True,
        max_length=1000,
        verbose_name="Birth date expressed in EDTF",
    )
    # Earliest/latest concrete dates implied by birth_edtf
    # (derived by process_birth_edtf).
    birth_early = models.DateField(null=True, blank=True)
    birth_late = models.DateField(null=True, blank=True)
    death_edtf = models.CharField(
        default="",
        blank=True,
        max_length=1000,
        verbose_name="Death date expressed in EDTF",
    )
    # Earliest/latest concrete dates implied by death_edtf
    # (derived by process_death_edtf).
    death_early = models.DateField(null=True, blank=True)
    death_late = models.DateField(null=True, blank=True)

    class Meta:
        verbose_name_plural = "people"

    def process_edtf(self, d):
        """Parse EDTF string ``d`` into a named tuple
        ``(string, begin, end)``: the normalized EDTF string plus the
        earliest and latest dates it can denote.

        Propagates the parser's exception for invalid input.
        """
        ProcessedEDTF = namedtuple("ProcessedEDTF", "string begin end")
        edtf_date = parse_edtf(d)
        start_date = struct_time_to_date(edtf_date.lower_strict())
        end_date = struct_time_to_date(edtf_date.upper_strict())
        return ProcessedEDTF(str(edtf_date), start_date, end_date)

    def process_birth_edtf(self):
        """Derive birth_early/birth_late from birth_edtf (does not save)."""
        processed_birth = self.process_edtf(self.birth_edtf)
        self.birth_early = processed_birth.begin
        self.birth_late = processed_birth.end

    def process_death_edtf(self):
        """Derive death_early/death_late from death_edtf (does not save)."""
        processed_death = self.process_edtf(self.death_edtf)
        self.death_early = processed_death.begin
        self.death_late = processed_death.end

    def populate_from_lcnaf_graph(self, lcnaf_graph, update_viaf=False):
        """Populate this person's labels from an LCNAF SKOS graph.

        If the graph carries a VIAF skos:exactMatch, the linked VIAF
        graph is fetched and used to populate names and dates as well;
        when ``update_viaf`` is true the VIAF CloseMatch row is also
        recorded. Saves the instance.
        """
        core_data = [
            (str(s), o.value)
            for s, p, o in lcnaf_graph.triples((None, namespace.SKOS.prefLabel, None))
        ][0]
        prefLabel = core_data[1]

        altLabels = []
        for s, p, o in lcnaf_graph.triples((None, namespace.SKOS.altLabel, None)):
            lang = o.language if o.language is not None else ""
            altLabels.append({"value": o.value, "lang": lang})

        self.label = prefLabel
        self.pref_label = prefLabel
        Name.objects.bulk_create(
            [
                Name(name_of=self, label=l["value"], language=l["lang"])
                for l in altLabels
            ],
            ignore_conflicts=True,
        )

        viaf_concept = [
            str(o)
            for s, p, o in lcnaf_graph.triples((None, namespace.SKOS.exactMatch, None))
            if namespaces.VIAF in str(o)
        ]
        if len(viaf_concept) > 0:
            # Resolve the VIAF concept and (optionally) record the match.
            viaf_graph = Graph().parse(viaf_concept[0])
            viaf_uris = [
                str(o)
                for s, p, o in viaf_graph.triples(
                    (None, URIRef(f"{namespace.FOAF}focus"), None)
                )
            ]
            if len(viaf_uris) > 0 and update_viaf:
                CloseMatch.objects.get_or_create(
                    entity=self,
                    authority=Authority.objects.get(namespace=namespaces.VIAF),
                    identifier=viaf_uris[0],
                )
            # NOTE: VIAF population runs whenever a VIAF concept exists,
            # even when update_viaf is False (behavior preserved).
            self.populate_from_viaf_graph(viaf_graph)
        self.save()

    def populate_from_lcnaf_uri(self, update_viaf=True):
        """Fetch this person's LCNAF SKOS RDF and populate from it.

        No-op when there is no recorded LCNAF match.
        """
        if self.lcnaf_match is None:
            return
        g = Graph().parse(f"{self.lcnaf_match}.skos.xml", format="xml")
        self.populate_from_lcnaf_graph(lcnaf_graph=g, update_viaf=update_viaf)

    def _names_from_labels(self, viaf_graph, labels, preferred):
        """Build unsaved Name rows for a list of SKOS label dicts.

        Each label's skos:inScheme is resolved to (or created as) an
        Authority; labels without a source scheme are skipped. Shared by
        the preferred- and alternate-label passes, which previously
        duplicated this loop.
        """
        names = []
        for l in labels:
            source_schemae = [
                o
                for s, p, o in viaf_graph.triples(
                    (l["subject"], namespace.SKOS.inScheme, None)
                )
            ]
            if len(source_schemae) == 0:
                # No source schema: skip this name.
                continue
            source_schema = str(source_schemae[0])
            authority = Authority.objects.get_or_create(
                viaf_namespace=source_schema,
                defaults={"label": source_schema, "namespace": source_schema},
            )[0]
            norm_language = l["language"] if l["language"] is not None else ""
            names.append(
                Name(
                    name_of=self,
                    label=l["label"],
                    language=norm_language,
                    authority=authority,
                    preferred=preferred,
                )
            )
        return names

    def populate_from_viaf_graph(self, viaf_graph):
        """Populate preferred/alternate names and birth/death dates from
        a VIAF RDF graph. Saves the instance.
        """
        viaf_uri = self.viaf_match

        # Preferred labels asserted by the other authorities in the graph.
        pref_labels = [
            {"label": o.value, "language": o.language, "subject": s}
            for s, p, o in viaf_graph.triples((None, namespace.SKOS.prefLabel, None))
            if str(s) != viaf_uri
        ]
        Name.objects.bulk_create(
            self._names_from_labels(viaf_graph, pref_labels, preferred=True),
            ignore_conflicts=True,
        )

        # Pick this person's pref_label from VIAF's own preferred labels,
        # privileging US English when available. Falls back to the first
        # other-authority label (assumes the graph has at least one).
        viaf_preferred_labels = [
            {"label": o.value, "language": o.language}
            for s, p, o in viaf_graph.triples(
                (URIRef(viaf_uri), namespace.SKOS.prefLabel, None)
            )
        ]
        viaf_en_label = [l for l in viaf_preferred_labels if l["language"] == "en-US"]
        if len(viaf_en_label) > 0:
            self.pref_label = viaf_en_label[0]["label"]
        elif len(viaf_preferred_labels) > 0:
            self.pref_label = viaf_preferred_labels[0]["label"]
        else:
            self.pref_label = pref_labels[0]["label"]

        # Alternate labels from other authorities. BUGFIX: the original
        # filter was ``s is not URIRef(viaf_uri)`` — an identity test
        # against a freshly constructed object, which is always True —
        # so the VIAF subject's own altLabels were never excluded.
        alt_labels = [
            {"label": o.value, "language": o.language, "subject": s}
            for s, p, o in viaf_graph.triples((None, namespace.SKOS.altLabel, None))
            if str(s) != viaf_uri
        ]
        Name.objects.bulk_create(
            self._names_from_labels(viaf_graph, alt_labels, preferred=False),
            ignore_conflicts=True,
        )

        # Birth date: use the first literal that parses as valid EDTF.
        birth_literals = {
            o.value
            for s, p, o in viaf_graph.triples(
                (URIRef(viaf_uri), URIRef("http://schema.org/birthDate"), None)
            )
        }
        for d in birth_literals:
            try:
                self.birth_edtf = d
                self.process_birth_edtf()
                break
            except Exception:
                continue

        # Death date, same strategy. BUGFIX: the original wrote
        # ``death_literals = birth_literals = {...}``, accidentally
        # rebinding birth_literals as well.
        death_literals = {
            o.value
            for s, p, o in viaf_graph.triples(
                (URIRef(viaf_uri), URIRef("http://schema.org/deathDate"), None)
            )
        }
        for d in death_literals:
            try:
                self.death_edtf = d
                self.process_death_edtf()
                break
            except Exception:
                continue

        self.save()

    def populate_from_viaf_uri(self):
        """Fetch this person's VIAF RDF and populate from it.

        No-op when there is no recorded VIAF match.
        """
        if self.viaf_match is None:
            return
        g = Graph().parse(f"{self.viaf_match}/rdf.xml", format="xml")
        self.populate_from_viaf_graph(viaf_graph=g)
class CorporateBody(Entity):
    """An organization / corporate entity; inherits all Entity behavior."""

    class Meta:
        verbose_name_plural = "corporate bodies"
class Concept(Entity):
    """A subject/concept entity arranged in a broader/narrower hierarchy."""

    # Both hierarchy directions are stored as explicit M2M relations
    # rather than one being derived from the other.
    broader = models.ManyToManyField("Concept", related_name="narrower_items")
    narrower = models.ManyToManyField("Concept", related_name="broader_items")

    class Meta:
        pass
# class Predicate(
# userCreatedModel, uniqueLabledModel, descriptionModel, URIModel, dateModifiedModel
# ):
# authority = models.ForeignKey(
# "Authority", on_delete=models.CASCADE, related_name="predicates"
# )
# class Relation(userCreatedModel, dateModifiedModel):
# source = models.ForeignKey(
# Entity, on_delete=models.CASCADE, related_name="statements_from"
# )
# target = models.ForeignKey(
# Entity, on_delete=models.CASCADE, related_name="statements_to"
# )
# relation_type = models.ForeignKey(
# Predicate, on_delete=models.CASCADE, related_name="used_in_statements"
# )
# class Meta:
# unique_together = ("source", "target", "relation_type")
| 33.788344 | 103 | 0.57567 | from django.db import models
from django.contrib.postgres.fields import ArrayField
from authority import mixins, namespaces
from authority.models import CloseMatch, Authority
from rdflib import Graph, namespace
from rdflib.term import URIRef
from edtf import parse_edtf, struct_time_to_date
from collections import namedtuple
class Entity(mixins.trackedModel):
pref_label = models.CharField(
default="", blank=True, db_index=True, max_length=5000
)
@property
def viaf_match(self):
try:
return CloseMatch.objects.get(
authority__namespace=namespaces.VIAF, entity=self
).identifier
except:
return None
@property
def lcnaf_match(self):
try:
return CloseMatch.objects.get(
authority__namespace=namespaces.LOC, entity=self
).identifier
except:
return None
def __str__(self):
return self.pref_label
class Meta:
verbose_name_plural = "entities"
class Name(mixins.labeledModel, mixins.trackedModel):
name_of = models.ForeignKey(
"Entity", on_delete=models.CASCADE, related_name="alt_labels"
)
authority = models.ForeignKey(
"authority.Authority",
null=True,
blank=True,
on_delete=models.CASCADE,
help_text="Authority that uses this name",
related_name="names_used",
)
language = models.CharField(default="", blank=True, max_length=50, db_index=True)
preferred = models.BooleanField(
default=False,
db_index=True,
help_text="Is this name considered 'preferred' by the authority using it?",
)
class Meta:
unique_together = ("label", "name_of", "language", "preferred", "authority")
class Person(Entity):
birth_edtf = models.CharField(
default="",
blank=True,
max_length=1000,
verbose_name="Birth date expressed in EDTF",
)
birth_early = models.DateField(null=True, blank=True)
birth_late = models.DateField(null=True, blank=True)
death_edtf = models.CharField(
default="",
blank=True,
max_length=1000,
verbose_name="Death date expressed in EDTF",
)
death_early = models.DateField(null=True, blank=True)
death_late = models.DateField(null=True, blank=True)
class Meta:
verbose_name_plural = "people"
def process_edtf(self, d):
ProcessedEDTF = namedtuple("ProcessedEDTF", "string begin end")
edtf_date = parse_edtf(d)
start_date = struct_time_to_date(edtf_date.lower_strict())
end_date = struct_time_to_date(edtf_date.upper_strict())
return ProcessedEDTF(str(edtf_date), start_date, end_date)
def process_birth_edtf(self):
processed_birth = self.process_edtf(self.birth_edtf)
self.birth_early = processed_birth.begin
self.birth_late = processed_birth.end
def process_death_edtf(self):
processed_death = self.process_edtf(self.death_edtf)
self.death_early = processed_death.begin
self.death_late = processed_death.end
def populate_from_lcnaf_graph(self, lcnaf_graph, update_viaf=False):
core_data = [
(str(s), o.value)
for s, p, o in lcnaf_graph.triples((None, namespace.SKOS.prefLabel, None))
][0]
prefLabel = core_data[1]
altLabels = []
for s, p, o in lcnaf_graph.triples((None, namespace.SKOS.altLabel, None)):
lang = o.language
if lang is None:
lang = ""
altLabels.append({"value": o.value, "lang": lang})
self.label = prefLabel
self.pref_label = prefLabel
alt_labels = [
Name(name_of=self, label=l["value"], language=l["lang"]) for l in altLabels
]
Name.objects.bulk_create(alt_labels, ignore_conflicts=True)
viaf_concept = [
str(o)
for s, p, o in lcnaf_graph.triples((None, namespace.SKOS.exactMatch, None))
if namespaces.VIAF in str(o)
]
if len(viaf_concept) > 0:
viaf_graph = Graph().parse(viaf_concept[0])
viaf_uris = [
str(o)
for s, p, o in viaf_graph.triples(
(None, URIRef(f"{namespace.FOAF}focus"), None)
)
]
if len(viaf_uris) > 0 and update_viaf:
CloseMatch.objects.get_or_create(
entity=self,
authority=Authority.objects.get(namespace=namespaces.VIAF),
identifier=viaf_uris[0],
)
self.populate_from_viaf_graph(viaf_graph)
self.save()
def populate_from_lcnaf_uri(self, update_viaf=True):
if self.lcnaf_match is None:
return
g = Graph().parse(f"{self.lcnaf_match}.skos.xml", format="xml")
self.populate_from_lcnaf_graph(lcnaf_graph=g, update_viaf=update_viaf)
def populate_from_viaf_graph(self, viaf_graph):
viaf_uri = self.viaf_match
pref_labels = [
{"label": o.value, "language": o.language, "subject": s}
for s, p, o in viaf_graph.triples((None, namespace.SKOS.prefLabel, None))
if str(s) != viaf_uri
]
prefNames = []
for l in pref_labels:
source_schemae = [
o
for s, p, o in viaf_graph.triples(
(l["subject"], namespace.SKOS.inScheme, None)
)
]
if len(source_schemae) == 0:
continue
else:
source_schema = source_schemae[0].__str__()
authority = Authority.objects.get_or_create(
viaf_namespace=source_schema,
defaults={
"label": source_schema,
"namespace": source_schema,
},
)[0]
norm_language = l["language"] if l["language"] is not None else ""
prefNames.append(
Name(
name_of=self,
label=l["label"],
language=norm_language,
authority=authority,
preferred=True,
)
)
Name.objects.bulk_create(prefNames, ignore_conflicts=True)
viaf_preferred_labels = [
{"label": o.value, "language": o.language}
for s, p, o in viaf_graph.triples(
(URIRef(viaf_uri), namespace.SKOS.prefLabel, None)
)
]
viaf_en_label = [l for l in viaf_preferred_labels if l["language"] == "en-US"]
if len(viaf_en_label) > 0:
self.pref_label = viaf_en_label[0]["label"]
elif len(viaf_preferred_labels) > 0:
self.pref_label = viaf_preferred_labels[0]["label"]
else:
self.pref_label = pref_labels[0]["label"]
# Collect other authority labels
alt_labels = [
{"label": o.value, "language": o.language, "subject": s}
for s, p, o in viaf_graph.triples((None, namespace.SKOS.altLabel, None))
if s is not URIRef(viaf_uri)
]
altNames = []
for l in alt_labels:
# Get source schema
source_schemae = [
o
for s, p, o in viaf_graph.triples(
(l["subject"], namespace.SKOS.inScheme, None)
)
]
# If there is no source schema, skip to the next name
if len(source_schemae) == 0:
continue
else:
source_schema = source_schemae[0].__str__()
authority = Authority.objects.get_or_create(
viaf_namespace=source_schema,
defaults={"label": source_schema, "namespace": source_schema},
)[0]
norm_language = l["language"] if l["language"] is not None else ""
altNames.append(
Name(
name_of=self,
label=l["label"],
language=norm_language,
authority=authority,
preferred=False,
)
)
Name.objects.bulk_create(altNames, ignore_conflicts=True)
# Get birthdates
birth_literals = {
o.value
for s, p, o in viaf_graph.triples(
(URIRef(viaf_uri), URIRef("http://schema.org/birthDate"), None)
)
}
birth_start = None
birth_end = None
for d in birth_literals:
try:
self.birth_edtf = d
self.process_birth_edtf()
break
except Exception as e:
continue
# Get deathdates
death_literals = birth_literals = {
o.value
for s, p, o in viaf_graph.triples(
(URIRef(viaf_uri), URIRef("http://schema.org/deathDate"), None)
)
}
death_start = None
death_end = None
for d in death_literals:
try:
self.death_edtf = d
self.process_death_edtf()
break
except:
continue
self.save()
def populate_from_viaf_uri(self):
if self.viaf_match is None:
return
g = Graph().parse(f"{self.viaf_match}/rdf.xml", format="xml")
self.populate_from_viaf_graph(viaf_graph=g)
class CorporateBody(Entity):
class Meta:
verbose_name_plural = "corporate bodies"
class Concept(Entity):
broader = models.ManyToManyField("Concept", related_name="narrower_items")
narrower = models.ManyToManyField("Concept", related_name="broader_items")
class Meta:
pass
# class Predicate(
# userCreatedModel, uniqueLabledModel, descriptionModel, URIModel, dateModifiedModel
# ):
# authority = models.ForeignKey(
# "Authority", on_delete=models.CASCADE, related_name="predicates"
# )
# class Relation(userCreatedModel, dateModifiedModel):
# source = models.ForeignKey(
# Entity, on_delete=models.CASCADE, related_name="statements_from"
# )
# target = models.ForeignKey(
# Entity, on_delete=models.CASCADE, related_name="statements_to"
# )
# relation_type = models.ForeignKey(
# Predicate, on_delete=models.CASCADE, related_name="used_in_statements"
# )
# class Meta:
# unique_together = ("source", "target", "relation_type")
| true | true |
f7fe14afd267c81e5801235ae2a641efdc6ae22a | 1,508 | py | Python | IsStressful.py | Cynthyah/Exercises | 4c458cb518d8e77f2c51a9dd8d36eb4e4c73364c | [
"MIT"
] | null | null | null | IsStressful.py | Cynthyah/Exercises | 4c458cb518d8e77f2c51a9dd8d36eb4e4c73364c | [
"MIT"
] | null | null | null | IsStressful.py | Cynthyah/Exercises | 4c458cb518d8e77f2c51a9dd8d36eb4e4c73364c | [
"MIT"
] | null | null | null | # The function should recognise if a subject line is stressful.
# A stressful subject line means that all letters are in uppercase,
# and/or ends by at least 3 exclamation marks, and/or contains at least
# one of the following “red” words: "help", "asap", "urgent".
# Any of those "red" words can be spelled in different ways -
# "HELP", "help", "HeLp", "H!E!L!P!", "H-E-L-P", even in a very loooong way "HHHEEEEEEEEELLP,"
# they just can't have any other letters interspersed between them.
def is_stressful(subj):
    """Return True when a subject line reads as stressful.

    A subject is stressful when it ends with at least three exclamation
    marks, is written entirely in uppercase, or spells one of the red
    words "help"/"asap"/"urgent" — possibly decorated with punctuation
    or repeated letters, but with no other letters mixed in.
    """
    if subj.endswith('!!!') or subj.isupper():
        return True
    # Collapse the subject to its lowercase letters with consecutive
    # duplicates removed, so "H-E-L-P" and "HHHEEELLP" both become "help".
    collapsed = []
    for ch in subj.lower():
        if ch.isalpha() and (not collapsed or collapsed[-1] != ch):
            collapsed.append(ch)
    squeezed = ''.join(collapsed)
    return any(red in squeezed for red in ('help', 'asap', 'urgent'))
if __name__ == '__main__':
    # Kata-style self-checks, run only when executed as a script.
    #These "asserts" are only for self-checking and not necessarily for auto-testing
    assert is_stressful("Hi!") == False, "First"
    assert is_stressful("I neeed HELP") == True, "Second"
    assert is_stressful("h!e!l!p") == True, "Second"  # NOTE(review): label "Second" is duplicated
    assert is_stressful("He loves peace") == False, "333"
    assert is_stressful("We need you A.S.A.P.!!") == True, "333"
    assert is_stressful("UUUURGGGEEEEENT here") == True, "333"
    assert is_stressful("Headlamp, wastepaper bin and supermagnificently") == False, "333"
    print('Done! Go Check it!')
| 35.904762 | 95 | 0.617374 |
def is_stressful(subj):
if subj[-3:] == '!!!' or subj.isupper():
return True
word = ' '
for l in subj.lower():
if l.isalpha():
if word[-1] != l:
word += l
red_words = ['help','asap','urgent']
for red in red_words:
if red in word:
return True
return False
if __name__ == '__main__':
#These "asserts" are only for self-checking and not necessarily for auto-testing
assert is_stressful("Hi!") == False, "First"
assert is_stressful("I neeed HELP") == True, "Second"
assert is_stressful("h!e!l!p") == True, "Second"
assert is_stressful("He loves peace") == False, "333"
assert is_stressful("We need you A.S.A.P.!!") == True, "333"
assert is_stressful("UUUURGGGEEEEENT here") == True, "333"
assert is_stressful("Headlamp, wastepaper bin and supermagnificently") == False, "333"
print('Done! Go Check it!')
| true | true |
f7fe14d138cf6f7fd1ab65ca5a26b92088033547 | 44 | py | Python | imgcrop/settings/__init__.py | shineklbm/django-image-upload-crop | d87e43e450f5b4bd5b87c2c1a456a4215c594a74 | [
"MIT"
] | null | null | null | imgcrop/settings/__init__.py | shineklbm/django-image-upload-crop | d87e43e450f5b4bd5b87c2c1a456a4215c594a74 | [
"MIT"
] | null | null | null | imgcrop/settings/__init__.py | shineklbm/django-image-upload-crop | d87e43e450f5b4bd5b87c2c1a456a4215c594a74 | [
"MIT"
] | null | null | null | from .default import *
from .local import *
| 14.666667 | 22 | 0.727273 | from .default import *
from .local import *
| true | true |
f7fe150adba20525340c4fb770952bf65cd76c07 | 1,182 | py | Python | examples/nameko_sqlalchemy/app.py | sww/graphene-sqlalchemy | 4016b624173207d6d302c8600b841aa1a2eaf87d | [
"MIT"
] | null | null | null | examples/nameko_sqlalchemy/app.py | sww/graphene-sqlalchemy | 4016b624173207d6d302c8600b841aa1a2eaf87d | [
"MIT"
] | null | null | null | examples/nameko_sqlalchemy/app.py | sww/graphene-sqlalchemy | 4016b624173207d6d302c8600b841aa1a2eaf87d | [
"MIT"
] | null | null | null | from database import db_session, init_db
from schema import schema
from graphql_server import (HttpQueryError, default_format_error,
encode_execution_results, json_encode,load_json_body, run_http_query)
class App():
    """GraphQL application facade: initializes the database and turns
    incoming HTTP requests into executed GraphQL query results."""

    def __init__(self):
        init_db()

    def query(self, request):
        """Execute the GraphQL query carried by ``request`` and return
        the JSON-encoded result body (the HTTP status code produced by
        the encoder is discarded)."""
        payload = self.parse_body(request)
        execution_results, _params = run_http_query(schema, 'post', payload)
        encoded, _status_code = encode_execution_results(
            execution_results,
            format_error=default_format_error,
            is_batch=False,
            encode=json_encode,
        )
        return encoded

    def parse_body(self, request):
        """Extract the GraphQL payload from ``request`` according to its
        mimetype; unknown content types yield an empty dict."""
        # mimetype is used (rather than content_type) because the extra
        # parameters such as charset are not needed here.
        mime = request.mimetype
        if mime == 'application/graphql':
            return {'query': request.data.decode('utf8')}
        if mime == 'application/json':
            return load_json_body(request.data.decode('utf8'))
        if mime in ('application/x-www-form-urlencoded', 'multipart/form-data'):
            return request.form
        return {}
from schema import schema
from graphql_server import (HttpQueryError, default_format_error,
encode_execution_results, json_encode,load_json_body, run_http_query)
class App():
def __init__(self):
init_db()
def query(self, request):
data = self.parse_body(request)
execution_results, params = run_http_query(
schema,
'post',
data)
result, status_code = encode_execution_results(
execution_results,
format_error=default_format_error,is_batch=False, encode=json_encode)
return result
def parse_body(self,request):
# information provided by content_type
content_type = request.mimetype
if content_type == 'application/graphql':
return {'query': request.data.decode('utf8')}
elif content_type == 'application/json':
return load_json_body(request.data.decode('utf8'))
elif content_type in ('application/x-www-form-urlencoded', 'multipart/form-data'):
return request.form
return {} | true | true |
f7fe162588d2c1b718581e23282cac2f0c645211 | 4,275 | py | Python | zcrmsdk/src/com/zoho/crm/api/util/datatype_converter.py | zoho/zohocrm-python-sdk-2.0 | 3a93eb3b57fed4e08f26bd5b311e101cb2995411 | [
"Apache-2.0"
] | null | null | null | zcrmsdk/src/com/zoho/crm/api/util/datatype_converter.py | zoho/zohocrm-python-sdk-2.0 | 3a93eb3b57fed4e08f26bd5b311e101cb2995411 | [
"Apache-2.0"
] | null | null | null | zcrmsdk/src/com/zoho/crm/api/util/datatype_converter.py | zoho/zohocrm-python-sdk-2.0 | 3a93eb3b57fed4e08f26bd5b311e101cb2995411 | [
"Apache-2.0"
] | null | null | null | try:
from dateutil.tz import tz
import dateutil.parser
from zcrmsdk.src.com.zoho.crm.api.util.constants import Constants
from datetime import date, datetime
except Exception:
from dateutil.tz import tz
import dateutil.parser
from .constants import Constants
from datetime import date, datetime
class DataTypeConverter(object):
    """
    Converts JSON values to the expected Python type and vice versa.

    Conversion functions are registered lazily in two class-level maps,
    keyed by type name ("String", "Integer", ...).
    """

    pre_converter_map = {}
    post_converter_map = {}

    @staticmethod
    def init():
        """
        Populate the converter maps on first use (idempotent).
        """
        if DataTypeConverter.pre_converter_map and DataTypeConverter.post_converter_map:
            return

        # (pre_converter, post_converter) pairs per wire type name.
        converters = {
            "String": (lambda obj: str(obj), lambda obj: str(obj)),
            "Integer": (lambda obj: int(obj), lambda obj: int(obj)),
            "Long": (
                lambda obj: int(obj) if str(obj) != Constants.NULL_VALUE else None,
                lambda obj: int(obj),
            ),
            "Boolean": (lambda obj: bool(obj), lambda obj: bool(obj)),
            "Float": (lambda obj: float(obj), lambda obj: float(obj)),
            "Double": (lambda obj: float(obj), lambda obj: float(obj)),
            "Date": (
                lambda obj: dateutil.parser.isoparse(obj).date(),
                lambda obj: obj.isoformat(),
            ),
            "DateTime": (
                lambda obj: dateutil.parser.isoparse(obj).astimezone(tz.tzlocal()),
                lambda obj: obj.replace(microsecond=0).astimezone(tz.tzlocal()).isoformat(),
            ),
            "Object": (
                lambda obj: DataTypeConverter.pre_convert_object_data(obj),
                lambda obj: DataTypeConverter.post_convert_object_data(obj),
            ),
        }
        for name, (pre, post) in converters.items():
            DataTypeConverter.add_to_map(name, pre, post)

    @staticmethod
    def pre_convert_object_data(obj):
        """JSON -> Python passthrough for untyped object data."""
        return obj

    @staticmethod
    def post_convert_object_data(obj):
        """Recursively convert Python data into a JSON-serializable form."""
        if obj is None:
            return None
        if isinstance(obj, list):
            return [DataTypeConverter.post_convert_object_data(item) for item in obj]
        if isinstance(obj, dict):
            return {
                key: DataTypeConverter.post_convert_object_data(value)
                for key, value in obj.items()
            }
        # Checked before datetime, as in the original: datetime instances
        # also satisfy isinstance(obj, date), so they take this branch.
        if isinstance(obj, date):
            return DataTypeConverter.post_convert(obj, Constants.DATE_NAMESPACE)
        if isinstance(obj, datetime):
            return DataTypeConverter.post_convert(obj, Constants.DATETIME_NAMESPACE)
        return obj

    @staticmethod
    def add_to_map(name, pre_converter, post_converter):
        """
        Register a pre/post converter pair under ``name``.

        :param name: A str containing the data type class name.
        :param pre_converter: Callable converting JSON -> Python.
        :param post_converter: Callable converting Python -> JSON.
        """
        DataTypeConverter.pre_converter_map[name] = pre_converter
        DataTypeConverter.post_converter_map[name] = post_converter

    @staticmethod
    def pre_convert(obj, data_type):
        """
        Convert a JSON value to the expected Python value.

        Returns None (implicitly) when ``data_type`` is unregistered.
        """
        DataTypeConverter.init()
        converter = DataTypeConverter.pre_converter_map.get(data_type)
        if converter is not None:
            return converter(obj)

    @staticmethod
    def post_convert(obj, data_type):
        """
        Convert a Python value to its JSON representation.

        Returns None (implicitly) when ``data_type`` is unregistered.
        """
        DataTypeConverter.init()
        converter = DataTypeConverter.post_converter_map.get(data_type)
        if converter is not None:
            return converter(obj)
| 37.173913 | 193 | 0.679766 | try:
from dateutil.tz import tz
import dateutil.parser
from zcrmsdk.src.com.zoho.crm.api.util.constants import Constants
from datetime import date, datetime
except Exception:
from dateutil.tz import tz
import dateutil.parser
from .constants import Constants
from datetime import date, datetime
class DataTypeConverter(object):
pre_converter_map = {}
post_converter_map = {}
@staticmethod
def init():
if len(DataTypeConverter.pre_converter_map) != 0 and len(DataTypeConverter.post_converter_map) != 0:
return
DataTypeConverter.add_to_map("String", lambda obj: str(obj), lambda obj: str(obj))
DataTypeConverter.add_to_map("Integer", lambda obj: int(obj), lambda obj: int(obj))
DataTypeConverter.add_to_map("Long", lambda obj: int(obj) if str(obj) != Constants.NULL_VALUE else None, lambda obj: int(obj))
DataTypeConverter.add_to_map("Boolean", lambda obj: bool(obj), lambda obj: bool(obj))
DataTypeConverter.add_to_map("Float", lambda obj: float(obj), lambda obj: float(obj))
DataTypeConverter.add_to_map("Double", lambda obj: float(obj), lambda obj: float(obj))
DataTypeConverter.add_to_map("Date", lambda obj: dateutil.parser.isoparse(obj).date(), lambda obj: obj.isoformat())
DataTypeConverter.add_to_map("DateTime", lambda obj: dateutil.parser.isoparse(obj).astimezone(tz.tzlocal()), lambda obj: obj.replace(microsecond=0).astimezone(tz.tzlocal()).isoformat())
DataTypeConverter.add_to_map("Object", lambda obj: DataTypeConverter.pre_convert_object_data(obj), lambda obj: DataTypeConverter.post_convert_object_data(obj))
@staticmethod
def pre_convert_object_data(obj):
return obj
@staticmethod
def post_convert_object_data(obj):
if obj is None:
return None
if isinstance(obj, list):
list_value = []
for data in obj:
list_value.append(DataTypeConverter.post_convert_object_data(data))
return list_value
elif isinstance(obj, dict):
dict_value = {}
for key, value in obj.items():
dict_value[key] = DataTypeConverter.post_convert_object_data(value)
return dict_value
elif isinstance(obj, date):
return DataTypeConverter.post_convert(obj, Constants.DATE_NAMESPACE)
elif isinstance(obj, datetime):
return DataTypeConverter.post_convert(obj, Constants.DATETIME_NAMESPACE)
else:
return obj
@staticmethod
def add_to_map(name, pre_converter, post_converter):
DataTypeConverter.pre_converter_map[name] = pre_converter
DataTypeConverter.post_converter_map[name] = post_converter
@staticmethod
def pre_convert(obj, data_type):
DataTypeConverter.init()
if data_type in DataTypeConverter.pre_converter_map:
return DataTypeConverter.pre_converter_map[data_type](obj)
@staticmethod
def post_convert(obj, data_type):
DataTypeConverter.init()
if data_type in DataTypeConverter.post_converter_map:
return DataTypeConverter.post_converter_map[data_type](obj)
| true | true |
f7fe1667392be67c07c4a7b95524ad4da5f2b62c | 21,643 | py | Python | tests/test_errors.py | maltefritz/tespy | e7463cf71f69d091adc0eeb985d5ac549ae170a8 | [
"MIT"
] | 1 | 2022-03-23T10:25:36.000Z | 2022-03-23T10:25:36.000Z | tests/test_errors.py | nkawerau/tespy | 26865e3d530b972c59fe0af47d6561843bcdd4d6 | [
"MIT"
] | null | null | null | tests/test_errors.py | nkawerau/tespy | 26865e3d530b972c59fe0af47d6561843bcdd4d6 | [
"MIT"
] | null | null | null | # -*- coding: utf-8
"""Module for testing program errors.
This file is part of project TESPy (github.com/oemof/tespy). It's copyrighted
by the contributors recorded in the version control history of the file,
available from its original location
tests/test_errors.py
SPDX-License-Identifier: MIT
"""
import os
import shutil
from pytest import raises
from tespy.components import CombustionChamber
from tespy.components import CombustionEngine
from tespy.components import Compressor
from tespy.components import Merge
from tespy.components import Pipe
from tespy.components import Sink
from tespy.components import Source
from tespy.components import Splitter
from tespy.components import Subsystem
from tespy.components import Turbine
from tespy.components import WaterElectrolyzer
from tespy.components.component import Component
from tespy.connections import Bus
from tespy.connections import Connection
from tespy.connections import Ref
from tespy.networks import Network
from tespy.tools.characteristics import CharLine
from tespy.tools.characteristics import CharMap
from tespy.tools.characteristics import load_custom_char
from tespy.tools.data_containers import ComponentCharacteristics as dc_cc
from tespy.tools.data_containers import DataContainer
from tespy.tools.data_containers import FluidComposition as dc_flu
from tespy.tools.fluid_properties import h_mix_pQ
from tespy.tools.helpers import TESPyComponentError
from tespy.tools.helpers import TESPyConnectionError
from tespy.tools.helpers import TESPyNetworkError
from tespy.tools.helpers import UserDefinedEquation
from tespy.tools.helpers import extend_basic_path
##############################################################################
# test errors of set_attr and get_attr methods
def get_attr_KeyError(instance, key):
with raises(KeyError):
instance.get_attr(key)
def set_attr_KeyError(instance, **kwargs):
with raises(KeyError):
instance.set_attr(**kwargs)
def set_attr_NotImplementedError(instance, **kwargs):
with raises(NotImplementedError):
instance.set_attr(**kwargs)
def set_attr_TypeError(instance, **kwargs):
with raises(TypeError):
instance.set_attr(**kwargs)
def set_attr_ValueError(instance, **kwargs):
with raises(ValueError):
instance.set_attr(**kwargs)
def test_set_attr_errors():
"""Test errors of set_attr methods."""
nw = Network(['water', 'air'])
comb = CombustionEngine('combustion engine')
pipeline = Pipe('pipeline')
conn = Connection(comb, 'out1', pipeline, 'in1')
mybus = Bus('mybus')
# ValueErrors
set_attr_ValueError(comb, offdesign=['Q'])
set_attr_ValueError(conn, offdesign=['f'])
set_attr_ValueError(nw, m_unit='kg')
set_attr_ValueError(nw, h_unit='kg')
set_attr_ValueError(nw, p_unit='kg')
set_attr_ValueError(nw, T_unit='kg')
set_attr_ValueError(nw, v_unit='kg')
set_attr_ValueError(conn, state=5)
# TypeErrors
set_attr_TypeError(comb, P=[5])
set_attr_TypeError(comb, P=[5])
set_attr_TypeError(comb, tiP_char=7)
set_attr_TypeError(comb, design='f')
set_attr_TypeError(comb, lamb=dc_cc())
set_attr_TypeError(comb, design_path=7)
set_attr_TypeError(comb, local_design=5)
set_attr_TypeError(comb, local_offdesign=5)
set_attr_TypeError(pipeline, hydro_group=5)
set_attr_TypeError(comb, printout=5)
set_attr_TypeError(conn, design='h')
set_attr_TypeError(conn, fluid_balance=1)
set_attr_TypeError(conn, h0=[4])
set_attr_TypeError(conn, fluid=5)
set_attr_TypeError(conn, design_path=5)
set_attr_TypeError(conn, local_design=5)
set_attr_TypeError(conn, local_offdesign=5)
set_attr_TypeError(conn, printout=5)
set_attr_TypeError(conn, state='f')
set_attr_TypeError(nw, m_range=5)
set_attr_TypeError(nw, p_range=5)
set_attr_TypeError(nw, h_range=5)
set_attr_TypeError(nw, iterinfo=5)
set_attr_TypeError(mybus, P='some value')
set_attr_TypeError(mybus, printout=5)
# KeyErrors
set_attr_KeyError(dc_cc(), x=7)
set_attr_KeyError(comb, wow=5)
set_attr_KeyError(conn, jey=5)
set_attr_KeyError(mybus, power_output=100000)
# NotImplementedError
set_attr_NotImplementedError(conn, Td_bp=Ref(conn, 1, 0))
def test_get_attr_errors():
"""Test errors of get_attr methods."""
nw = Network(['water', 'air'])
comb = CombustionEngine('combustion engine')
pipeline = Pipe('pipeline')
conn = Connection(comb, 'out1', pipeline, 'in1')
mybus = Bus('mybus')
sub = Subsystem('MySub')
get_attr_KeyError(comb, 'wow')
get_attr_KeyError(conn, 'key')
get_attr_KeyError(mybus, 'components')
get_attr_KeyError(nw, 'missing')
get_attr_KeyError(Ref(conn, 1, 0), 'comp')
get_attr_KeyError(sub, 'test')
get_attr_KeyError(CharLine(), 'test')
get_attr_KeyError(DataContainer(), 'somekey')
get_attr_KeyError(CharMap(), 'Stuff')
##############################################################################
# test error in component label
def test_cmp_instanciation_ValueError():
"""Test bad label specification for component."""
labels = [5, 'Label,', 'Labe;l', 'Label.']
for label in labels:
with raises(ValueError):
CombustionEngine(label)
##############################################################################
# test errors in connection classes
##############################################################################
# connection
def test_Connection_creation_ValueError():
"""Test ValueErrors creating connections."""
comb = CombustionEngine('combustion engine')
pipeline = Pipe('pipeline')
with raises(ValueError):
Connection(comb, 'out6', pipeline, 'in1')
with raises(ValueError):
Connection(comb, 'out1', pipeline, 'in5')
def test_Connection_creation_TypeError():
"""Test TypeErrors creating connections."""
comb = CombustionEngine('combustion engine')
pipeline = Pipe('pipeline')
with raises(TypeError):
Connection(comb, 'out1', 7, 'in1')
with raises(TypeError):
Connection(comb, 'out1', pipeline, 'in1', label=5)
def test_Connection_creation_TESPyConnectionError():
comb = CombustionEngine('combustion engine')
with raises(TESPyConnectionError):
Connection(comb, 'out1', comb, 'in1')
##############################################################################
# ref
def create_ref_TypeError(params):
with raises(TypeError):
Ref(params[0], params[1], params[2])
def test_ref_creation_error():
"""Test errors creating reference objects."""
comb = CombustionEngine('combustion engine')
pipeline = Pipe('pipeline')
conn = Connection(comb, 'out1', pipeline, 'in1')
create_ref_TypeError([conn, 7, 'hi'])
create_ref_TypeError([conn, 'hi', 0])
create_ref_TypeError([comb, 1, 0])
##############################################################################
# bus
def bus_add_comps_TypeError(b, c):
with raises(TypeError):
b.add_comps(c)
def test_Bus_add_comps_errors():
"""Test errors adding components to busses."""
mybus = Bus('mybus')
comb = CombustionEngine('combustion engine')
pipeline = Pipe('pipeline')
conn = Connection(comb, 'out1', pipeline, 'in1')
bus_add_comps_TypeError(mybus, {'comp': conn})
bus_add_comps_TypeError(mybus, {'f': comb})
bus_add_comps_TypeError(mybus, {'comp': comb, 'char': 'Hi'})
bus_add_comps_TypeError(mybus, {'comp': comb, 'param': 5})
bus_add_comps_TypeError(mybus, {'comp': comb, 'P_ref': 'what'})
bus_add_comps_TypeError(mybus, comb)
with raises(ValueError):
mybus.add_comps({'comp': comb, 'base': 5})
##############################################################################
# test errors of UserDefinedEquation
def udf_dummy():
return
def test_UserDefinedEquation_errors():
with raises(TypeError):
UserDefinedEquation(7, udf_dummy, udf_dummy, [])
with raises(TypeError):
UserDefinedEquation('label', udf_dummy, udf_dummy, 'connections')
with raises(TypeError):
UserDefinedEquation('label', udf_dummy, udf_dummy, [], params=[])
##############################################################################
# test errors of component classes
##############################################################################
# CombustionChamber
def test_CombustionChamber_missing_fuel():
"""Test no fuel in network."""
nw = Network(['H2O', 'N2', 'O2', 'Ar', 'CO2'])
instance = CombustionChamber('combustion chamber')
c1 = Connection(Source('air'), 'out1', instance, 'in1')
c2 = Connection(Source('fuel'), 'out1', instance, 'in2')
c3 = Connection(instance, 'out1', Sink('flue gas'), 'in1')
nw.add_conns(c1, c2, c3)
with raises(TESPyComponentError):
nw.solve('design', init_only=True)
def test_CombustionChamber_missing_oxygen():
"""Test no oxygen in network."""
nw = Network(['H2O', 'N2', 'Ar', 'CO2', 'CH4'])
instance = CombustionChamber('combustion chamber')
c1 = Connection(Source('air'), 'out1', instance, 'in1')
c2 = Connection(Source('fuel'), 'out1', instance, 'in2')
c3 = Connection(instance, 'out1', Sink('flue gas'), 'in1')
nw.add_conns(c1, c2, c3)
with raises(TESPyComponentError):
nw.solve('design', init_only=True)
##############################################################################
# combustion_engine
class TestCombustionEngineBusErrors:
def setup(self):
self.nw = Network(['water', 'air'])
self.instance = CombustionEngine('combustion engine')
self.bus = Bus('power')
self.bus.add_comps({'comp': self.instance, 'param': 'Param'})
def test_missing_Bus_param_func(self):
"""Test wrong/missing bus parameter in bus function."""
with raises(ValueError):
self.instance.bus_func(self.bus.comps.loc[self.instance])
def test_missing_Bus_param_deriv(self):
"""Test wrong/missing bus parameter in bus derivatives."""
# both values do not matter, but are required for the test
self.instance.num_nw_vars = 1
self.instance.num_vars = 1
self.instance.inl = [Connection(self.instance, 'out1',
Sink('sink'), 'in1')]
self.instance.inl[0].fluid = dc_flu(val={'water': 1})
with raises(ValueError):
self.instance.bus_deriv(self.bus)
##############################################################################
# compressor
def test_compressor_missing_char_parameter():
"""Compressor with invalid parameter for eta_s_char function."""
nw = Network(['CH4'])
so = Source('source')
si = Sink('sink')
instance = Compressor('compressor')
c1 = Connection(so, 'out1', instance, 'in1')
c2 = Connection(instance, 'out1', si, 'in1')
nw.add_conns(c1, c2)
instance.set_attr(eta_s_char={
'func': CharLine([0, 1], [1, 2]), 'is_set': True, 'param': None})
nw.solve('design', init_only=True)
with raises(ValueError):
instance.eta_s_char_func()
##############################################################################
# subsystems
def test_subsys_label_str():
with raises(ValueError):
Subsystem(5)
def test_subsys_label_forbidden():
with raises(ValueError):
Subsystem('label;')
##############################################################################
# turbine
def test_Turbine_missing_char_parameter():
"""Turbine with invalid parameter for eta_s_char function."""
nw = Network(['CH4'])
so = Source('source')
si = Sink('sink')
instance = Turbine('turbine')
c1 = Connection(so, 'out1', instance, 'in1')
c2 = Connection(instance, 'out1', si, 'in1')
nw.add_conns(c1, c2)
instance.set_attr(eta_s_char={
'char_func': CharLine([0, 1], [1, 2]), 'is_set': True, 'param': None})
nw.solve('design', init_only=True)
with raises(ValueError):
instance.eta_s_char_func()
##############################################################################
# WaterElectrolyzer
class TestWaterElectrolyzerErrors:
def setup_electrolyzer_Network(self):
"""Set up Network for electrolyzer tests."""
self.instance = WaterElectrolyzer('electrolyzer')
fw = Source('feed water')
cw_in = Source('cooling water')
o2 = Sink('oxygen sink')
h2 = Sink('hydrogen sink')
cw_out = Sink('cooling water sink')
cw_el = Connection(cw_in, 'out1', self.instance, 'in1')
el_cw = Connection(self.instance, 'out1', cw_out, 'in1')
self.nw.add_conns(cw_el, el_cw)
fw_el = Connection(fw, 'out1', self.instance, 'in2')
el_o2 = Connection(self.instance, 'out2', o2, 'in1')
el_h2 = Connection(self.instance, 'out3', h2, 'in1')
self.nw.add_conns(fw_el, el_o2, el_h2)
def test_missing_hydrogen_in_Network(self):
"""Test missing hydrogen in Network fluids with water electrolyzer."""
self.nw = Network(['H2O', 'O2'])
self.setup_electrolyzer_Network()
with raises(TESPyComponentError):
self.nw.solve('design')
def test_missing_oxygen_in_Network(self):
"""Test missing oxygen in Network fluids with water electrolyzer."""
self.nw = Network(['H2O', 'H2'])
self.setup_electrolyzer_Network()
with raises(TESPyComponentError):
self.nw.solve('design')
def test_missing_water_in_Network(self):
"""Test missing water in Network fluids with water electrolyzer."""
self.nw = Network(['O2', 'H2'])
self.setup_electrolyzer_Network()
with raises(TESPyComponentError):
self.nw.solve('design')
def test_wrong_Bus_param_func():
"""Test missing/wrong bus parameter specification in equations."""
# this test does not need setup, since the function is called without
# network initialisation
instance = WaterElectrolyzer('electrolyzer')
some_bus = Bus('some_bus')
param = 'G'
some_bus.add_comps({'comp': instance, 'param': param})
with raises(ValueError):
instance.bus_func(some_bus.comps.loc[instance])
def test_wrong_Bus_param_deriv():
"""Test missing/wrong bus parameter specification in derivatives."""
# this test does not need setup, since the function is called without
# network initialisation
instance = WaterElectrolyzer('electrolyzer')
# required for calling bus_deriv method without network initialisation
instance.num_vars = 1
instance.num_nw_fluids = 1
instance.num_nw_vars = 1
some_bus = Bus('some_bus')
param = 'G'
some_bus.add_comps({'comp': instance, 'param': param})
with raises(ValueError):
instance.bus_deriv(some_bus)
##############################################################################
# test errors of Network class
class TestNetworkErrors:
def setup(self):
self.nw = Network(['water'])
def test_add_conns_TypeError(self):
with raises(TypeError):
self.nw.add_conns(Component('test'))
def test_no_connections_error(self):
with raises(TESPyNetworkError):
self.nw.solve('design')
def test_bad_fluids_in_fluid_vector(self):
source1 = Source('source1')
sink1 = Sink('sink1')
a = Connection(source1, 'out1', sink1, 'in1', fluid={'air': 1})
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.solve('design')
def test_duplicate_Connection_labels(self):
source1 = Source('source1')
source2 = Source('source2')
sink1 = Sink('sink1')
sink2 = Sink('sink2')
a = Connection(source1, 'out1', sink1, 'in1', label='myconn')
b = Connection(source2, 'out1', sink2, 'in1', label='myconn')
with raises(ValueError):
self.nw.add_conns(a, b)
def test_Connection_error_source(self):
source = Source('source')
sink1 = Sink('sink1')
sink2 = Sink('sink2')
a = Connection(source, 'out1', sink1, 'in1')
b = Connection(source, 'out1', sink2, 'in1')
self.nw.add_conns(a, b)
with raises(TESPyNetworkError):
self.nw.check_network()
def test_Connection_error_target(self):
source1 = Source('source1')
source2 = Source('source2')
sink = Sink('sink')
a = Connection(source1, 'out1', sink, 'in1')
b = Connection(source2, 'out1', sink, 'in1')
self.nw.add_conns(a, b)
with raises(TESPyNetworkError):
self.nw.check_network()
def test_consistency_inlets(self):
merge = Merge('merge')
sink = Sink('label')
a = Connection(merge, 'out1', sink, 'in1')
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.check_network()
def test_consistency_outlets(self):
source = Source('source')
splitter = Splitter('splitter')
a = Connection(source, 'out1', splitter, 'in1')
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.check_network()
def test_component_label_duplicates(self):
source = Source('label')
sink = Sink('label')
a = Connection(source, 'out1', sink, 'in1')
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.check_network()
def test_missing_offdesign_path(self):
source = Source('source')
sink = Sink('sink')
a = Connection(source, 'out1', sink, 'in1')
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.solve('offdesign')
def test_bad_mode_specification(self):
source = Source('source')
sink = Sink('sink')
a = Connection(source, 'out1', sink, 'in1')
self.nw.add_conns(a)
with raises(ValueError):
self.nw.solve('ofdesign')
def test_underdetermination(self):
source = Source('source')
sink = Sink('sink')
a = Connection(source, 'out1', sink, 'in1', m=1)
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.solve('design')
def test_overdetermination(self):
source = Source('source')
sink = Sink('sink')
a = Connection(source, 'out1', sink, 'in1', m=1, p=1e5, x=1, h=1e6,
fluid={'water': 1}, fluid_balance=True)
self.nw.add_conns(a)
with raises(TESPyNetworkError):
self.nw.solve('design')
def test_add_Bus_TypeError(self):
source = Source('label')
sink = Sink('label')
a = Connection(source, 'out1', sink, 'in1')
with raises(TypeError):
self.nw.add_busses(a)
def test_Bus_duplicate(self):
with raises(TESPyNetworkError):
b = Bus('mybus')
self.nw.add_busses(b, b)
def test_buslabel_duplicate(self):
with raises(TESPyNetworkError):
a = Bus('mybus')
b = Bus('mybus')
self.nw.add_busses(a, b)
def test_Network_instanciation_no_fluids():
nw = Network([])
so = Source('source')
si = Sink('sink')
conn = Connection(so, 'out1', si, 'in1')
nw.add_conns(conn)
with raises(TESPyNetworkError):
nw.solve('design', init_only=True)
def test_Network_instanciation_single_fluid():
with raises(TypeError):
Network('water')
##############################################################################
# test errors of characteristics classes
def test_char_number_of_points():
with raises(ValueError):
CharLine(x=[0, 1, 2], y=[1, 2, 3, 4])
def test_CharMap_number_of_points():
with raises(ValueError):
CharMap(x=[0, 1, 2], y=[[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3]])
def test_CharMap_number_of_dimensions():
with raises(ValueError):
CharMap(x=[0, 1, 2], y=[[1, 2, 3, 4], [1, 2, 3, 4]])
def test_CharMap_y_z_dimension_mismatch():
with raises(ValueError):
CharMap(x=[0, 1], y=[[1, 2, 3, 4], [1, 2, 3, 4]],
z=[[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])
def test_missing_CharLine_files():
"""Test missing files."""
path = extend_basic_path('data')
tmp_path = extend_basic_path('tmp_dir_for_testing')
if os.path.exists(path):
for f in os.listdir(path):
shutil.copy(src=path + '/' + f, dst=tmp_path)
shutil.rmtree(path, ignore_errors=True)
with raises(FileNotFoundError):
load_custom_char('stuff', CharLine)
if os.path.exists(tmp_path):
for f in os.listdir(tmp_path):
shutil.copy(src=tmp_path + '/' + f, dst=path)
shutil.rmtree(tmp_path, ignore_errors=True)
def test_missing_CharMap_files():
"""Test missing files."""
path = extend_basic_path('data')
tmp_path = extend_basic_path('tmp_dir_for_testing')
if os.path.exists(path):
for f in os.listdir(path):
shutil.copy(src=path + '/' + f, dst=tmp_path)
shutil.rmtree(path, ignore_errors=True)
with raises(FileNotFoundError):
load_custom_char('some other stuff', CharMap)
if os.path.exists(tmp_path):
for f in os.listdir(tmp_path):
shutil.copy(src=tmp_path + '/' + f, dst=path)
shutil.rmtree(tmp_path, ignore_errors=True)
##############################################################################
# test errors in fluid porperty functions
def test_h_mix_pQ_on_mixtures():
with raises(ValueError):
h_mix_pQ([0, 0, 0, {'O2': 0.24, 'N2': 0.76}], 0.75)
| 32.742814 | 78 | 0.618537 |
import os
import shutil
from pytest import raises
from tespy.components import CombustionChamber
from tespy.components import CombustionEngine
from tespy.components import Compressor
from tespy.components import Merge
from tespy.components import Pipe
from tespy.components import Sink
from tespy.components import Source
from tespy.components import Splitter
from tespy.components import Subsystem
from tespy.components import Turbine
from tespy.components import WaterElectrolyzer
from tespy.components.component import Component
from tespy.connections import Bus
from tespy.connections import Connection
from tespy.connections import Ref
from tespy.networks import Network
from tespy.tools.characteristics import CharLine
from tespy.tools.characteristics import CharMap
from tespy.tools.characteristics import load_custom_char
from tespy.tools.data_containers import ComponentCharacteristics as dc_cc
from tespy.tools.data_containers import DataContainer
from tespy.tools.data_containers import FluidComposition as dc_flu
from tespy.tools.fluid_properties import h_mix_pQ
from tespy.tools.helpers import TESPyComponentError
from tespy.tools.helpers import TESPyConnectionError
from tespy.tools.helpers import TESPyNetworkError
from tespy.tools.helpers import UserDefinedEquation
from tespy.tools.helpers import extend_basic_path
KeyError(CharMap(), 'Stuff')
| true | true |
f7fe16fd1ca24a2a6bf431dc7ddc6550afffd1e4 | 4,425 | py | Python | tests/integration/widgets/test_toggle.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2020-01-19T03:17:18.000Z | 2020-01-19T03:17:18.000Z | tests/integration/widgets/test_toggle.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2021-05-12T10:14:45.000Z | 2021-05-12T10:14:45.000Z | tests/integration/widgets/test_toggle.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2020-01-21T12:03:58.000Z | 2020-01-21T12:03:58.000Z | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from bokeh._testing.util.selenium import RECORD
from bokeh.core.enums import ButtonType
from bokeh.layouts import column
from bokeh.models import (
Circle,
ColumnDataSource,
CustomAction,
CustomJS,
Plot,
Range1d,
Toggle,
)
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
pytest_plugins = (
"bokeh._testing.plugins.project",
)
@pytest.mark.integration
@pytest.mark.selenium
class Test_Toggle(object):
def test_displays_label(self, bokeh_model_page):
button = Toggle(label="label", css_classes=["foo"])
page = bokeh_model_page(button)
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
assert button.text == "label"
@pytest.mark.parametrize('typ', list(ButtonType))
def test_displays_button_type(self, typ, bokeh_model_page):
button = Toggle(button_type=typ, css_classes=["foo"])
page = bokeh_model_page(button)
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
assert typ in button.get_attribute('class')
def test_server_on_click_round_trip(self, bokeh_server_page):
def modify_doc(doc):
source = ColumnDataSource(dict(x=[1, 2], y=[1, 1]))
plot = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=0)
plot.add_glyph(source, Circle(x='x', y='y', size=20))
plot.add_tools(CustomAction(callback=CustomJS(args=dict(s=source), code=RECORD("data", "s.data"))))
button = Toggle(css_classes=['foo'])
def cb(value):
if value:
source.data=dict(x=[10, 20], y=[10, 10])
else:
source.data=dict(x=[100, 200], y=[100, 100])
button.on_click(cb)
doc.add_root(column(button, plot))
page = bokeh_server_page(modify_doc)
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
page.click_custom_action()
results = page.results
assert results == {'data': {'x': [10, 20], 'y': [10, 10]}}
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
page.click_custom_action()
results = page.results
assert results == {'data': {'x': [100, 200], 'y': [100, 100]}}
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
page.click_custom_action()
results = page.results
assert results == {'data': {'x': [10, 20], 'y': [10, 10]}}
# XXX (bev) disabled until https://github.com/bokeh/bokeh/issues/7970 is resolved
#assert page.has_no_console_errors()
# XXX (bev) Toggle does not register to process ButtonClick events
def test_js_on_click_executes(self, bokeh_model_page):
button = Toggle(css_classes=['foo'])
button.js_on_click(CustomJS(code=RECORD("value", "cb_obj.active")))
page = bokeh_model_page(button)
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
results = page.results
assert results == {'value': True}
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
results = page.results
assert results == {'value': False}
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
results = page.results
assert results == {'value': True}
assert page.has_no_console_errors()
| 33.270677 | 116 | 0.541921 |
import pytest ; pytest
from bokeh._testing.util.selenium import RECORD
from bokeh.core.enums import ButtonType
from bokeh.layouts import column
from bokeh.models import (
Circle,
ColumnDataSource,
CustomAction,
CustomJS,
Plot,
Range1d,
Toggle,
)
pytest_plugins = (
"bokeh._testing.plugins.project",
)
@pytest.mark.integration
@pytest.mark.selenium
class Test_Toggle(object):
    """Selenium integration tests for the Toggle button widget."""

    def test_displays_label(self, bokeh_model_page):
        """The configured label text is rendered on the button element."""
        toggle = Toggle(label="label", css_classes=["foo"])
        page = bokeh_model_page(toggle)

        elem = page.driver.find_element_by_css_selector('.foo .bk-btn')
        assert elem.text == "label"

    @pytest.mark.parametrize('typ', list(ButtonType))
    def test_displays_button_type(self, typ, bokeh_model_page):
        """Each button_type value shows up in the element's CSS classes."""
        toggle = Toggle(button_type=typ, css_classes=["foo"])
        page = bokeh_model_page(toggle)

        elem = page.driver.find_element_by_css_selector('.foo .bk-btn')
        assert typ in elem.get_attribute('class')

    def test_server_on_click_round_trip(self, bokeh_server_page):
        """A Python on_click callback flips the data source on every click."""
        def modify_doc(doc):
            source = ColumnDataSource(dict(x=[1, 2], y=[1, 1]))
            plot = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=0)
            plot.add_glyph(source, Circle(x='x', y='y', size=20))
            plot.add_tools(CustomAction(callback=CustomJS(args=dict(s=source), code=RECORD("data", "s.data"))))
            toggle = Toggle(css_classes=['foo'])
            def cb(value):
                # Alternate the data depending on the toggle's active state.
                if value:
                    source.data=dict(x=[10, 20], y=[10, 10])
                else:
                    source.data=dict(x=[100, 200], y=[100, 100])
            toggle.on_click(cb)
            doc.add_root(column(toggle, plot))

        page = bokeh_server_page(modify_doc)

        # Three clicks alternate active True/False/True and therefore the data.
        for expected in ({'data': {'x': [10, 20], 'y': [10, 10]}},
                         {'data': {'x': [100, 200], 'y': [100, 100]}},
                         {'data': {'x': [10, 20], 'y': [10, 10]}}):
            elem = page.driver.find_element_by_css_selector('.foo .bk-btn')
            elem.click()
            page.click_custom_action()
            assert page.results == expected

    def test_js_on_click_executes(self, bokeh_model_page):
        """A CustomJS callback observes the alternating active state."""
        toggle = Toggle(css_classes=['foo'])
        toggle.js_on_click(CustomJS(code=RECORD("value", "cb_obj.active")))

        page = bokeh_model_page(toggle)

        for expected in (True, False, True):
            elem = page.driver.find_element_by_css_selector('.foo .bk-btn')
            elem.click()
            assert page.results == {'value': expected}

        assert page.has_no_console_errors()
| true | true |
f7fe1743ea73f309aebae27d9620407ab48702ec | 173 | py | Python | shiyanlou_cs354-127b99c086/minecloud/virtualmachine/constants.py | tongxindao/shiyanlou | 1d002ea342deb69066c287db9935f77f49f0a09e | [
"Apache-2.0"
] | null | null | null | shiyanlou_cs354-127b99c086/minecloud/virtualmachine/constants.py | tongxindao/shiyanlou | 1d002ea342deb69066c287db9935f77f49f0a09e | [
"Apache-2.0"
] | null | null | null | shiyanlou_cs354-127b99c086/minecloud/virtualmachine/constants.py | tongxindao/shiyanlou | 1d002ea342deb69066c287db9935f77f49f0a09e | [
"Apache-2.0"
] | null | null | null | # _*_ coding: utf-8 _*_
# VM status codes reported for each virtual machine.
VM_RUNNING = 5   # VM is up and running
VM_SHUTDOWN = 1  # VM has been shut down
VM_INIT = 0      # VM is initializing
# Map from numeric status code to human-readable status name.
VM_STATUS = {
    VM_RUNNING: 'RUNNING',
    VM_SHUTDOWN: 'SHUTDOWN',
    VM_INIT: 'INIT'
}
| 13.307692 | 28 | 0.630058 |
# VM status codes reported for each virtual machine.
VM_RUNNING = 5   # VM is up and running
VM_SHUTDOWN = 1  # VM has been shut down
VM_INIT = 0      # VM is initializing
# Map from numeric status code to human-readable status name.
VM_STATUS = {
    VM_RUNNING: 'RUNNING',
    VM_SHUTDOWN: 'SHUTDOWN',
    VM_INIT: 'INIT'
}
| true | true |
f7fe17fae6c1cfe317f6a98617410bb1fb9db586 | 722 | py | Python | BOJ/02000~02999/2100~2199/2143.py | shinkeonkim/today-ps | f3e5e38c5215f19579bb0422f303a9c18c626afa | [
"Apache-2.0"
] | 2 | 2020-01-29T06:54:41.000Z | 2021-11-07T13:23:27.000Z | BOJ/02000~02999/2100~2199/2143.py | shinkeonkim/Today_PS | bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44 | [
"Apache-2.0"
] | null | null | null | BOJ/02000~02999/2100~2199/2143.py | shinkeonkim/Today_PS | bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44 | [
"Apache-2.0"
# BOJ 2143 ("Two Arrays"): count pairs of contiguous subarrays, one from each
# array, whose sums add up to T.
from collections import defaultdict


def _subarray_sum_counts(arr):
    """Return a dict mapping each contiguous-subarray sum of arr to its count.

    Uses prefix sums: the sum of arr[i:j] equals prefix[j] - prefix[i],
    so every pair i < j yields one subarray sum.  O(len(arr)**2) pairs.
    """
    prefix = [0]
    for value in arr:
        prefix.append(prefix[-1] + value)
    counts = defaultdict(int)
    n = len(prefix)
    for i in range(n):
        for j in range(i + 1, n):
            counts[prefix[j] - prefix[i]] += 1
    return counts


T = int(input())
N = int(input())  # length of the first array (consumed; values follow)
L1 = list(map(int, input().split()))
M = int(input())  # length of the second array (consumed; values follow)
L2 = list(map(int, input().split()))

d1 = _subarray_sum_counts(L1)
d2 = _subarray_sum_counts(L2)

# For each subarray sum s of L1, every subarray of L2 summing to T - s forms
# a valid pair; multiply the occurrence counts.  (No sorting needed: the
# total is order-independent.)
cnt = 0
for s, occurrences in d1.items():
    cnt += d2.get(T - s, 0) * occurrences
print(cnt)
# BOJ 2143: count pairs of contiguous subarrays (one per array) summing to T.
T = int(input())
N = int(input())
L1 = list(map(int, input().split()))
M = int(input())
L2 = list(map(int, input().split()))

# Prefix sums: P[k] is the sum of the first k elements of the array.
P1 = [0]
for v in L1:
    P1.append(P1[-1] + v)
P2 = [0]
for v in L2:
    P2.append(P2[-1] + v)

# Count every contiguous-subarray sum of each array.
d1 = {}
for i in range(N + 1):
    for j in range(i + 1, N + 1):
        s = P1[j] - P1[i]
        d1[s] = d1.get(s, 0) + 1
d2 = {}
for i in range(M + 1):
    for j in range(i + 1, M + 1):
        s = P2[j] - P2[i]
        d2[s] = d2.get(s, 0) + 1

# Pair each subarray sum s from L1 with subarray sums T - s from L2.
cnt = 0
for s, occurrences in sorted(d1.items()):
    cnt += d2.get(T - s, 0) * occurrences
print(cnt)
f7fe1835534bf2c815a3365cf3833fb15b5aa8c1 | 77,800 | py | Python | lib/endpoints-1.0/endpoints/api_config.py | enpi/Test | 5fb2055c7cfd4cc91ff97471c529b041f21abeb6 | [
"Apache-2.0"
] | null | null | null | lib/endpoints-1.0/endpoints/api_config.py | enpi/Test | 5fb2055c7cfd4cc91ff97471c529b041f21abeb6 | [
"Apache-2.0"
] | null | null | null | lib/endpoints-1.0/endpoints/api_config.py | enpi/Test | 5fb2055c7cfd4cc91ff97471c529b041f21abeb6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Library for generating an API configuration document for a ProtoRPC backend.
The protorpc.remote.Service is inspected and a JSON document describing
the API is returned.
class MyResponse(messages.Message):
bool_value = messages.BooleanField(1)
int32_value = messages.IntegerField(2)
class MyService(remote.Service):
@remote.method(message_types.VoidMessage, MyResponse)
def entries_get(self, request):
pass
api = ApiConfigGenerator().pretty_print_config_to_json(MyService)
"""
try:
import json
except ImportError:
import simplejson as json
import logging
import re
from endpoints import message_parser
from endpoints import users_id_token
from protorpc import message_types
from protorpc import messages
from protorpc import remote
from protorpc import util
try:
from google.appengine.api import app_identity
except ImportError:
from google.appengine.api import app_identity
# Public annotation surface of this module: the api/method decorators,
# configuration classes, and auth-related constants/enums.
__all__ = [
    'API_EXPLORER_CLIENT_ID',
    'ApiAuth',
    'ApiConfigGenerator',
    'ApiConfigurationError',
    'ApiFrontEndLimitRule',
    'ApiFrontEndLimits',
    'CacheControl',
    'ResourceContainer',
    'EMAIL_SCOPE',
    'api',
    'method',
    'AUTH_LEVEL'
]
# Client ID whitelisted by default so the Google APIs Explorer can call APIs.
API_EXPLORER_CLIENT_ID = '292824132082.apps.googleusercontent.com'
# OAuth2 scope granting access to the authenticated user's email address.
EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
# Matches a {variable} reference inside a method path template.
_PATH_VARIABLE_PATTERN = r'{([a-zA-Z_][a-zA-Z_.\d]*)}'
# Error used when several classes claim the same API name/version but do not
# share a common configuration (see the api() docstring for multi-class APIs).
_MULTICLASS_MISMATCH_ERROR_TEMPLATE = (
    'Attempting to implement service %s, version %s, with multiple '
    'classes that aren\'t compatible. See docstring for api() for '
    'examples how to implement a multi-class API.')
def _Enum(docstring, *names):
"""Utility to generate enum classes used by annotations.
Args:
docstring: Docstring for the generated enum class.
*names: Enum names.
Returns:
A class that contains enum names as attributes.
"""
enums = dict(zip(names, range(len(names))))
reverse = dict((value, key) for key, value in enums.iteritems())
enums['reverse_mapping'] = reverse
enums['__doc__'] = docstring
return type('Enum', (object,), enums)
_AUTH_LEVEL_DOCSTRING = """
Define the enums used by the auth_level annotation to specify frontend
authentication requirement.
Frontend authentication is handled by a Google API server prior to the
request reaching backends. An early return before hitting the backend can
happen if the request does not fulfil the requirement specified by the
auth_level.
Valid values of auth_level and their meanings are:
AUTH_LEVEL.REQUIRED: Valid authentication credentials are required. Backend
will be called only if authentication credentials are present and valid.
AUTH_LEVEL.OPTIONAL: Authentication is optional. If authentication credentials
are supplied they must be valid. Backend will be called if the request
contains valid authentication credentials or no authentication credentials.
AUTH_LEVEL.OPTIONAL_CONTINUE: Authentication is optional and will be attempted
if authentication credentials are supplied. Invalid authentication
credentials will be removed but the request can always reach backend.
AUTH_LEVEL.NONE: Frontend authentication will be skipped. If authentication is
desired, it will need to be performed by the backend.
"""

# Enum with values REQUIRED, OPTIONAL, OPTIONAL_CONTINUE and NONE; see
# _AUTH_LEVEL_DOCSTRING above for the exact semantics of each level.
AUTH_LEVEL = _Enum(_AUTH_LEVEL_DOCSTRING, 'REQUIRED', 'OPTIONAL',
                   'OPTIONAL_CONTINUE', 'NONE')
class ApiConfigurationError(Exception):
  """Raised when an API's configuration or its annotations are invalid."""
def _GetFieldAttributes(field):
  """Decomposes field into the needed arguments to pass to the constructor.

  This can be used to create copies of the field or to compare if two fields
  are "equal" (since __eq__ is not implemented on messages.Field).

  Args:
    field: A ProtoRPC message field (potentially to be copied).

  Raises:
    TypeError: If the field is not an instance of messages.Field.

  Returns:
    A pair of relevant arguments to be passed to the constructor for the field
    type. The first element is a list of positional arguments for the
    constructor and the second is a dictionary of keyword arguments.
  """
  if not isinstance(field, messages.Field):
    raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,))

  keyword_args = dict(
      required=field.required,
      repeated=field.repeated,
      variant=field.variant,
      default=field._Field__default,
  )
  leading_args = []

  if isinstance(field, messages.MessageField):
    # Message fields never take a default, and (except for DateTimeField,
    # which is special-cased) need their message type positionally.
    del keyword_args['default']
    if not isinstance(field, message_types.DateTimeField):
      leading_args.append(field.message_type)
  elif isinstance(field, messages.EnumField):
    leading_args.append(field.type)

  return leading_args, keyword_args
def _CopyField(field, number=None):
  """Copies a (potentially) owned ProtoRPC field instance into a new copy.

  Args:
    field: A ProtoRPC message field to be copied.
    number: An integer for the field to override the number of the field.
      Defaults to None.

  Raises:
    TypeError: If the field is not an instance of messages.Field.

  Returns:
    A copy of the ProtoRPC message field.
  """
  leading_args, keyword_args = _GetFieldAttributes(field)
  # Fall back to the field's own number when no override was given.
  leading_args.append(number or field.number)
  return field.__class__(*leading_args, **keyword_args)
def _CompareFields(field, other_field):
  """Checks if two ProtoRPC fields are "equal".

  Compares the constructor arguments, rather than the id of the elements
  (which is the default __eq__ behavior), as well as the class of the fields.

  Args:
    field: A ProtoRPC message field to be compared.
    other_field: A ProtoRPC message field to be compared.

  Returns:
    Boolean indicating whether the fields are equal.
  """
  if _GetFieldAttributes(field) != _GetFieldAttributes(other_field):
    return False
  return field.__class__ == other_field.__class__
class ResourceContainer(object):
  """Container for a request body resource combined with parameters.

  Used for API methods which may also have path or query parameters in addition
  to a request body.

  Attributes:
    body_message_class: A message class to represent a request body.
    parameters_message_class: A placeholder message class for request
      parameters.
  """

  # Class-level registry shared by every ResourceContainer instance: maps a
  # protorpc remote method info object to the container registered for it.
  __remote_info_cache = {}

  # Class-level default for the memoized combined message class; instances
  # shadow this attribute the first time combined_message_class is computed.
  __combined_message_class = None

  def __init__(self, _body_message_class=message_types.VoidMessage, **kwargs):
    """Constructor for ResourceContainer.

    Stores a request body message class and attempts to create one from the
    keyword arguments passed in.

    Args:
      _body_message_class: A keyword argument to be treated like a positional
        argument. This will not conflict with the potential names of fields
        since they can't begin with underscore. We make this a keyword
        argument since the default VoidMessage is a very common choice given
        the prevalence of GET methods.
      **kwargs: Keyword arguments specifying field names (the named arguments)
        and instances of ProtoRPC fields as the values.
    """
    self.body_message_class = _body_message_class
    # Build a dynamic message class whose fields are exactly the keyword
    # arguments; it only exists to describe the non-body parameters.
    self.parameters_message_class = type('ParameterContainer',
                                         (messages.Message,), kwargs)

  @property
  def combined_message_class(self):
    """A ProtoRPC message class with both request and parameters fields.

    Caches the result in a local private variable. Uses _CopyField to create
    copies of the fields from the existing request and parameters classes since
    those fields are "owned" by the message classes.

    Raises:
      TypeError: If a field name is used in both the request message and the
        parameters but the two fields do not represent the same type.

    Returns:
      Value of combined message class for this property.
    """
    if self.__combined_message_class is not None:
      return self.__combined_message_class

    fields = {}
    # Field numbers are reassigned sequentially: body fields first, then any
    # parameter-only fields.
    field_number = 1
    for field in self.body_message_class.all_fields():
      fields[field.name] = _CopyField(field, number=field_number)
      field_number += 1
    for field in self.parameters_message_class.all_fields():
      if field.name in fields:
        # A name collision is tolerated only when both definitions agree.
        if not _CompareFields(field, fields[field.name]):
          raise TypeError('Field %r contained in both parameters and request '
                          'body, but the fields differ.' % (field.name,))
        else:
          continue
      fields[field.name] = _CopyField(field, number=field_number)
      field_number += 1

    self.__combined_message_class = type('CombinedContainer',
                                         (messages.Message,), fields)
    return self.__combined_message_class

  @classmethod
  def add_to_cache(cls, remote_info, container):
    """Adds a ResourceContainer to a cache tying it to a protorpc method.

    Args:
      remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding
        to a method.
      container: An instance of ResourceContainer.

    Raises:
      TypeError: if the container is not an instance of cls.
      KeyError: if the remote method has been reference by a container before.
        This created remote method should never occur because a remote method
        is created once.
    """
    if not isinstance(container, cls):
      raise TypeError('%r not an instance of %r, could not be added to cache.' %
                      (container, cls))
    if remote_info in cls.__remote_info_cache:
      raise KeyError('Cache has collision but should not.')
    cls.__remote_info_cache[remote_info] = container

  @classmethod
  def get_request_message(cls, remote_info):
    """Gets request message or container from remote info.

    Args:
      remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding
        to a method.

    Returns:
      Either an instance of the request type from the remote or the
      ResourceContainer that was cached with the remote method.
    """
    if remote_info in cls.__remote_info_cache:
      return cls.__remote_info_cache[remote_info]
    else:
      return remote_info.request_type()
def _CheckListType(settings, allowed_type, name, allow_none=True):
"""Verify that settings in list are of the allowed type or raise TypeError.
Args:
settings: The list of settings to check.
allowed_type: The allowed type of items in 'settings'.
name: Name of the setting, added to the exception.
allow_none: If set, None is also allowed.
Raises:
TypeError: if setting is not of the allowed type.
Returns:
The list of settings, for convenient use in assignment.
"""
if settings is None:
if not allow_none:
raise TypeError('%s is None, which is not allowed.' % name)
return settings
if not isinstance(settings, (tuple, list)):
raise TypeError('%s is not a list.' % name)
if not all(isinstance(i, allowed_type) for i in settings):
type_list = list(set(type(setting) for setting in settings))
raise TypeError('%s contains types that don\'t match %s: %s' %
(name, allowed_type.__name__, type_list))
return settings
def _CheckType(value, check_type, name, allow_none=True):
"""Check that the type of an object is acceptable.
Args:
value: The object whose type is to be checked.
check_type: The type that the object must be an instance of.
name: Name of the object, to be placed in any error messages.
allow_none: True if value can be None, false if not.
Raises:
TypeError: If value is not an acceptable type.
"""
if value is None and allow_none:
return
if not isinstance(value, check_type):
raise TypeError('%s type doesn\'t match %s.' % (name, check_type))
def _CheckEnum(value, check_type, name):
if value is None:
return
if value not in check_type.reverse_mapping:
raise TypeError('%s is not a valid value for %s' % (value, name))
class _ApiInfo(object):
  """Configurable attributes of an API.

  A structured data object used to store API information associated with each
  remote.Service-derived class that implements an API.  This stores properties
  that could be different for each class (such as the path or
  collection/resource name), as well as properties common to all classes in
  the API (such as API name and version).
  """

  @util.positional(2)
  def __init__(self, common_info, resource_name=None, path=None, audiences=None,
               scopes=None, allowed_client_ids=None, auth_level=None):
    """Constructor for _ApiInfo.

    Args:
      common_info: _ApiDecorator.__ApiCommonInfo, Information that's common for
        all classes that implement an API.
      resource_name: string, The collection that the annotated class will
        implement in the API. (Default: None)
      path: string, Base request path for all methods in this API.
        (Default: None)
      audiences: list of strings, Acceptable audiences for authentication.
        (Default: None)
      scopes: list of strings, Acceptable scopes for authentication.
        (Default: None)
      allowed_client_ids: list of strings, Acceptable client IDs for auth.
        (Default: None)
      auth_level: enum from AUTH_LEVEL, Frontend authentication level.
        (Default: None)
    """
    # Validate all per-class overrides before storing anything.
    _CheckType(resource_name, basestring, 'resource_name')
    _CheckType(path, basestring, 'path')
    _CheckListType(audiences, basestring, 'audiences')
    _CheckListType(scopes, basestring, 'scopes')
    _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids')
    _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')

    self.__common_info = common_info
    self.__resource_name = resource_name
    self.__path = path
    # Each of the following may be None, in which case the corresponding
    # property falls back to the API-wide default held in common_info.
    self.__audiences = audiences
    self.__scopes = scopes
    self.__allowed_client_ids = allowed_client_ids
    self.__auth_level = auth_level

  def is_same_api(self, other):
    """Check if this implements the same API as another _ApiInfo instance."""
    if not isinstance(other, _ApiInfo):
      return False
    # Identity (not equality) check: two classes belong to the same API only
    # if they share the exact same common-info object.
    return self.__common_info is other.__common_info

  @property
  def name(self):
    """Name of the API."""
    return self.__common_info.name

  @property
  def version(self):
    """Version of the API."""
    return self.__common_info.version

  @property
  def description(self):
    """Description of the API."""
    return self.__common_info.description

  @property
  def hostname(self):
    """Hostname for the API."""
    return self.__common_info.hostname

  @property
  def audiences(self):
    """List of audiences accepted for the API, overriding the defaults."""
    if self.__audiences is not None:
      return self.__audiences
    return self.__common_info.audiences

  @property
  def scopes(self):
    """List of scopes accepted for the API, overriding the defaults."""
    if self.__scopes is not None:
      return self.__scopes
    return self.__common_info.scopes

  @property
  def allowed_client_ids(self):
    """List of client IDs accepted for the API, overriding the defaults."""
    if self.__allowed_client_ids is not None:
      return self.__allowed_client_ids
    return self.__common_info.allowed_client_ids

  @property
  def auth_level(self):
    """Enum from AUTH_LEVEL specifying the frontend authentication level."""
    if self.__auth_level is not None:
      return self.__auth_level
    return self.__common_info.auth_level

  @property
  def canonical_name(self):
    """Canonical name for the API."""
    return self.__common_info.canonical_name

  @property
  def auth(self):
    """Authentication configuration information for this API."""
    return self.__common_info.auth

  @property
  def owner_domain(self):
    """Domain of the owner of this API."""
    return self.__common_info.owner_domain

  @property
  def owner_name(self):
    """Name of the owner of this API."""
    return self.__common_info.owner_name

  @property
  def package_path(self):
    """Package this API belongs to, '/' delimited. Used by client libs."""
    return self.__common_info.package_path

  @property
  def frontend_limits(self):
    """Optional query limits for unregistered developers."""
    return self.__common_info.frontend_limits

  @property
  def title(self):
    """Human readable name of this API."""
    return self.__common_info.title

  @property
  def documentation(self):
    """Link to the documentation for this version of the API."""
    return self.__common_info.documentation

  @property
  def resource_name(self):
    """Resource name for the class this decorates."""
    return self.__resource_name

  @property
  def path(self):
    """Base path prepended to any method paths in the class this decorates."""
    return self.__path
class _ApiDecorator(object):
  """Decorator for single- or multi-class APIs.

  An instance of this class can be used directly as a decorator for a
  single-class API. Or call the api_class() method to decorate a multi-class
  API.
  """

  @util.positional(3)
  def __init__(self, name, version, description=None, hostname=None,
               audiences=None, scopes=None, allowed_client_ids=None,
               canonical_name=None, auth=None, owner_domain=None,
               owner_name=None, package_path=None, frontend_limits=None,
               title=None, documentation=None, auth_level=None):
    """Constructor for _ApiDecorator.

    Args:
      name: string, Name of the API.
      version: string, Version of the API.
      description: string, Short description of the API (Default: None)
      hostname: string, Hostname of the API (Default: app engine default host)
      audiences: list of strings, Acceptable audiences for authentication.
      scopes: list of strings, Acceptable scopes for authentication.
      allowed_client_ids: list of strings, Acceptable client IDs for auth.
      canonical_name: string, the canonical name for the API, a more human
        readable version of the name.
      auth: ApiAuth instance, the authentication configuration information
        for this API.
      owner_domain: string, the domain of the person or company that owns
        this API. Along with owner_name, this provides hints to properly
        name client libraries for this API.
      owner_name: string, the name of the owner of this API. Along with
        owner_domain, this provides hints to properly name client libraries
        for this API.
      package_path: string, the "package" this API belongs to. This '/'
        delimited value specifies logical groupings of APIs. This is used by
        client libraries of this API.
      frontend_limits: ApiFrontEndLimits, optional query limits for
        unregistered developers.
      title: string, the human readable title of your API. It is exposed in
        the discovery service.
      documentation: string, a URL where users can find documentation about
        this version of the API. This will be surfaced in the API Explorer and
        GPE plugin to allow users to learn about your service.
      auth_level: enum from AUTH_LEVEL, Frontend authentication level.
    """
    # All classes decorated via this instance share this single common-info
    # object; _ApiInfo.is_same_api relies on that identity.
    self.__common_info = self.__ApiCommonInfo(
        name, version, description=description, hostname=hostname,
        audiences=audiences, scopes=scopes,
        allowed_client_ids=allowed_client_ids,
        canonical_name=canonical_name, auth=auth, owner_domain=owner_domain,
        owner_name=owner_name, package_path=package_path,
        frontend_limits=frontend_limits, title=title,
        documentation=documentation, auth_level=auth_level)
    # remote.Service classes registered via __call__/api_class().
    self.__classes = []

  class __ApiCommonInfo(object):
    """API information that's common among all classes that implement an API.

    When a remote.Service-derived class implements part of an API, there is
    some common information that remains constant across all such classes
    that implement the same API. This includes things like name, version,
    hostname, and so on. __ApiComminInfo stores that common information, and
    a single __ApiCommonInfo instance is shared among all classes that
    implement the same API, guaranteeing that they share the same common
    information.

    Some of these values can be overridden (such as audiences and scopes),
    while some can't and remain the same for all classes that implement
    the API (such as name and version).
    """

    @util.positional(3)
    def __init__(self, name, version, description=None, hostname=None,
                 audiences=None, scopes=None, allowed_client_ids=None,
                 canonical_name=None, auth=None, owner_domain=None,
                 owner_name=None, package_path=None, frontend_limits=None,
                 title=None, documentation=None, auth_level=None):
      """Constructor for _ApiCommonInfo.

      Args:
        name: string, Name of the API.
        version: string, Version of the API.
        description: string, Short description of the API (Default: None)
        hostname: string, Hostname of the API (Default: app engine default
          host)
        audiences: list of strings, Acceptable audiences for authentication.
        scopes: list of strings, Acceptable scopes for authentication.
        allowed_client_ids: list of strings, Acceptable client IDs for auth.
        canonical_name: string, the canonical name for the API, a more human
          readable version of the name.
        auth: ApiAuth instance, the authentication configuration information
          for this API.
        owner_domain: string, the domain of the person or company that owns
          this API. Along with owner_name, this provides hints to properly
          name client libraries for this API.
        owner_name: string, the name of the owner of this API. Along with
          owner_domain, this provides hints to properly name client libraries
          for this API.
        package_path: string, the "package" this API belongs to. This '/'
          delimited value specifies logical groupings of APIs. This is used by
          client libraries of this API.
        frontend_limits: ApiFrontEndLimits, optional query limits for
          unregistered developers.
        title: string, the human readable title of your API. It is exposed in
          the discovery service.
        documentation: string, a URL where users can find documentation about
          this version of the API. This will be surfaced in the API Explorer
          and GPE plugin to allow users to learn about your service.
        auth_level: enum from AUTH_LEVEL, Frontend authentication level.
      """
      # Validate every argument before applying defaults or storing.
      _CheckType(name, basestring, 'name', allow_none=False)
      _CheckType(version, basestring, 'version', allow_none=False)
      _CheckType(description, basestring, 'description')
      _CheckType(hostname, basestring, 'hostname')
      _CheckListType(audiences, basestring, 'audiences')
      _CheckListType(scopes, basestring, 'scopes')
      _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids')
      _CheckType(canonical_name, basestring, 'canonical_name')
      _CheckType(auth, ApiAuth, 'auth')
      _CheckType(owner_domain, basestring, 'owner_domain')
      _CheckType(owner_name, basestring, 'owner_name')
      _CheckType(package_path, basestring, 'package_path')
      _CheckType(frontend_limits, ApiFrontEndLimits, 'frontend_limits')
      _CheckType(title, basestring, 'title')
      _CheckType(documentation, basestring, 'documentation')
      _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')

      # Apply defaults: app's default hostname, email scope, the API
      # Explorer client ID, and no frontend authentication.
      if hostname is None:
        hostname = app_identity.get_default_version_hostname()
      if audiences is None:
        audiences = []
      if scopes is None:
        scopes = [EMAIL_SCOPE]
      if allowed_client_ids is None:
        allowed_client_ids = [API_EXPLORER_CLIENT_ID]
      if auth_level is None:
        auth_level = AUTH_LEVEL.NONE

      self.__name = name
      self.__version = version
      self.__description = description
      self.__hostname = hostname
      self.__audiences = audiences
      self.__scopes = scopes
      self.__allowed_client_ids = allowed_client_ids
      self.__canonical_name = canonical_name
      self.__auth = auth
      self.__owner_domain = owner_domain
      self.__owner_name = owner_name
      self.__package_path = package_path
      self.__frontend_limits = frontend_limits
      self.__title = title
      self.__documentation = documentation
      self.__auth_level = auth_level

    @property
    def name(self):
      """Name of the API."""
      return self.__name

    @property
    def version(self):
      """Version of the API."""
      return self.__version

    @property
    def description(self):
      """Description of the API."""
      return self.__description

    @property
    def hostname(self):
      """Hostname for the API."""
      return self.__hostname

    @property
    def audiences(self):
      """List of audiences accepted by default for the API."""
      return self.__audiences

    @property
    def scopes(self):
      """List of scopes accepted by default for the API."""
      return self.__scopes

    @property
    def allowed_client_ids(self):
      """List of client IDs accepted by default for the API."""
      return self.__allowed_client_ids

    @property
    def auth_level(self):
      """Enum from AUTH_LEVEL specifying default frontend auth level."""
      return self.__auth_level

    @property
    def canonical_name(self):
      """Canonical name for the API."""
      return self.__canonical_name

    @property
    def auth(self):
      """Authentication configuration for this API."""
      return self.__auth

    @property
    def owner_domain(self):
      """Domain of the owner of this API."""
      return self.__owner_domain

    @property
    def owner_name(self):
      """Name of the owner of this API."""
      return self.__owner_name

    @property
    def package_path(self):
      """Package this API belongs to, '/' delimited. Used by client libs."""
      return self.__package_path

    @property
    def frontend_limits(self):
      """Optional query limits for unregistered developers."""
      return self.__frontend_limits

    @property
    def title(self):
      """Human readable name of this API."""
      return self.__title

    @property
    def documentation(self):
      """Link to the documentation for this version of the API."""
      return self.__documentation

  def __call__(self, service_class):
    """Decorator for ProtoRPC class that configures Google's API server.

    Args:
      service_class: remote.Service class, ProtoRPC service class being wrapped.

    Returns:
      Same class with API attributes assigned in api_info.
    """
    # Single-class shorthand: decorating a class directly is equivalent to
    # using api_class() with no per-class overrides.
    return self.api_class()(service_class)

  def api_class(self, resource_name=None, path=None, audiences=None,
                scopes=None, allowed_client_ids=None, auth_level=None):
    """Get a decorator for a class that implements an API.

    This can be used for single-class or multi-class implementations. It's
    used implicitly in simple single-class APIs that only use @api directly.

    Args:
      resource_name: string, Resource name for the class this decorates.
        (Default: None)
      path: string, Base path prepended to any method paths in the class this
        decorates. (Default: None)
      audiences: list of strings, Acceptable audiences for authentication.
        (Default: None)
      scopes: list of strings, Acceptable scopes for authentication.
        (Default: None)
      allowed_client_ids: list of strings, Acceptable client IDs for auth.
        (Default: None)
      auth_level: enum from AUTH_LEVEL, Frontend authentication level.
        (Default: None)

    Returns:
      A decorator function to decorate a class that implements an API.
    """

    def apiserving_api_decorator(api_class):
      """Decorator for ProtoRPC class that configures Google's API server.

      Args:
        api_class: remote.Service class, ProtoRPC service class being wrapped.

      Returns:
        Same class with API attributes assigned in api_info.
      """
      # Track the class and attach per-class info that shares this
      # decorator's common-info object.
      self.__classes.append(api_class)
      api_class.api_info = _ApiInfo(
          self.__common_info, resource_name=resource_name,
          path=path, audiences=audiences, scopes=scopes,
          allowed_client_ids=allowed_client_ids, auth_level=auth_level)
      return api_class

    return apiserving_api_decorator

  def get_api_classes(self):
    """Get the list of remote.Service classes that implement this API."""
    return self.__classes
class ApiAuth(object):
  """Optional authorization configuration information for an API."""

  def __init__(self, allow_cookie_auth=None, blocked_regions=None):
    """Constructor for ApiAuth, authentication information for an API.

    Args:
      allow_cookie_auth: boolean, whether cooking auth is allowed. By
        default, API methods do not allow cookie authentication, and
        require the use of OAuth2 or ID tokens. Setting this field to
        True will allow cookies to be used to access the API, with
        potentially dangerous results. Please be very cautious in enabling
        this setting, and make sure to require appropriate XSRF tokens to
        protect your API.
      blocked_regions: list of Strings, a list of 2-letter ISO region codes
        to block.
    """
    # Validate before storing; invalid arguments abort construction.
    _CheckType(allow_cookie_auth, bool, 'allow_cookie_auth')
    _CheckListType(blocked_regions, basestring, 'blocked_regions')
    self.__cookie_auth_allowed = allow_cookie_auth
    self.__blocked_region_codes = blocked_regions

  @property
  def allow_cookie_auth(self):
    """Whether cookie authentication is allowed for this API."""
    return self.__cookie_auth_allowed

  @property
  def blocked_regions(self):
    """List of 2-letter ISO region codes to block."""
    return self.__blocked_region_codes
class ApiFrontEndLimitRule(object):
  """Custom rule to limit unregistered traffic."""

  def __init__(self, match=None, qps=None, user_qps=None, daily=None,
               analytics_id=None):
    """Constructor for ApiFrontEndLimitRule.

    Args:
      match: string, the matching rule that defines this traffic segment.
      qps: int, the aggregate QPS for this segment.
      user_qps: int, the per-end-user QPS for this segment.
      daily: int, the aggregate daily maximum for this segment.
      analytics_id: string, the project ID under which traffic for this
        segment will be logged.
    """
    # Type-check every argument (in declaration order) before storing.
    for arg_value, expected_type, arg_name in (
        (match, basestring, 'match'),
        (qps, int, 'qps'),
        (user_qps, int, 'user_qps'),
        (daily, int, 'daily'),
        (analytics_id, basestring, 'analytics_id')):
      _CheckType(arg_value, expected_type, arg_name)

    self.__match_rule = match
    self.__aggregate_qps = qps
    self.__per_user_qps = user_qps
    self.__daily_maximum = daily
    self.__analytics_project = analytics_id

  @property
  def match(self):
    """The matching rule that defines this traffic segment."""
    return self.__match_rule

  @property
  def qps(self):
    """The aggregate QPS for this segment."""
    return self.__aggregate_qps

  @property
  def user_qps(self):
    """The per-end-user QPS for this segment."""
    return self.__per_user_qps

  @property
  def daily(self):
    """The aggregate daily maximum for this segment."""
    return self.__daily_maximum

  @property
  def analytics_id(self):
    """Project ID under which traffic for this segment will be logged."""
    return self.__analytics_project
class ApiFrontEndLimits(object):
  """Optional front-end quota configuration for an API.

  Bundles the default quotas applied to unregistered (keyless) traffic
  with any custom ApiFrontEndLimitRule overrides.
  """

  def __init__(self, unregistered_user_qps=None, unregistered_qps=None,
               unregistered_daily=None, rules=None):
    """Initializes an ApiFrontEndLimits instance.

    Args:
      unregistered_user_qps: int, per-end-user QPS cap (end users are keyed
        by IP address); 0 blocks unregistered requests entirely.
      unregistered_qps: int, aggregate QPS cap for all unregistered traffic;
        a value of 0 currently means unlimited, though that might change.
        Use unregistered_user_qps or unregistered_daily to block.
      unregistered_daily: int, aggregate daily cap for unregistered traffic;
        0 blocks unregistered requests entirely.
      rules: list or tuple of ApiFrontEndLimitRule instances, custom
        per-segment limits for unregistered traffic.
    """
    # The three quota settings share the same int check; the rule list is
    # validated element-wise. None is accepted everywhere.
    for value, label in ((unregistered_user_qps, 'unregistered_user_qps'),
                         (unregistered_qps, 'unregistered_qps'),
                         (unregistered_daily, 'unregistered_daily')):
      _CheckType(value, int, label)
    _CheckListType(rules, ApiFrontEndLimitRule, 'rules')

    self.__unregistered_user_qps = unregistered_user_qps
    self.__unregistered_qps = unregistered_qps
    self.__unregistered_daily = unregistered_daily
    self.__rules = rules

  @property
  def unregistered_user_qps(self):
    """Per-end-user QPS cap for unregistered traffic."""
    return self.__unregistered_user_qps

  @property
  def unregistered_qps(self):
    """Aggregate QPS cap for all unregistered traffic."""
    return self.__unregistered_qps

  @property
  def unregistered_daily(self):
    """Aggregate daily cap for all unregistered traffic."""
    return self.__unregistered_daily

  @property
  def rules(self):
    """Custom ApiFrontEndLimitRule entries for unregistered traffic."""
    return self.__rules
@util.positional(2)
def api(name, version, description=None, hostname=None, audiences=None,
        scopes=None, allowed_client_ids=None, canonical_name=None,
        auth=None, owner_domain=None, owner_name=None, package_path=None,
        frontend_limits=None, title=None, documentation=None, auth_level=None):
  """Decorate a ProtoRPC Service class for use by the framework above.

  Specifies the name, version, description, hostname and other configuration
  of an API implemented by one or more ProtoRPC services.

  Sample usage (python 2.7):
    @endpoints.api(name='guestbook', version='v0.2',
                   description='Guestbook API')
    class PostService(remote.Service):
      ...

  Sample usage (python 2.5):
    class PostService(remote.Service):
      ...
    endpoints.api(name='guestbook', version='v0.2',
                  description='Guestbook API')(PostService)

  Sample usage if multiple classes implement one API:
    api_root = endpoints.api(name='library', version='v1.0')

    @api_root.api_class(resource_name='shelves')
    class Shelves(remote.Service):
      ...

    @api_root.api_class(resource_name='books', path='books')
    class Books(remote.Service):
      ...

  Args:
    name: string, Name of the API.
    version: string, Version of the API.
    description: string, Short description of the API (Default: None)
    hostname: string, Hostname of the API (Default: app engine default host)
    audiences: list of strings, Acceptable audiences for authentication.
    scopes: list of strings, Acceptable scopes for authentication.
    allowed_client_ids: list of strings, Acceptable client IDs for auth.
    canonical_name: string, a more human-readable version of the API name.
    auth: ApiAuth instance, the authentication configuration for this API.
    owner_domain: string, domain of the person or company that owns this API.
      Along with owner_name, this hints how to name client libraries.
    owner_name: string, name of the owner of this API (see owner_domain).
    package_path: string, the '/'-delimited "package" this API belongs to,
      specifying logical groupings of APIs used by client libraries.
    frontend_limits: ApiFrontEndLimits, optional query limits for
      unregistered developers.
    title: string, human-readable title of the API, exposed in the discovery
      service.
    documentation: string, URL where users can find documentation about this
      version of the API; surfaced in the API Explorer and GPE plugin.
    auth_level: enum from AUTH_LEVEL, frontend authentication level.

  Returns:
    Class decorator that attaches an api_info attribute, an instance of
    ApiInfo.
  """
  # Everything beyond name/version is optional configuration; forward it to
  # the decorator object as keywords.
  api_settings = dict(
      description=description, hostname=hostname, audiences=audiences,
      scopes=scopes, allowed_client_ids=allowed_client_ids,
      canonical_name=canonical_name, auth=auth, owner_domain=owner_domain,
      owner_name=owner_name, package_path=package_path,
      frontend_limits=frontend_limits, title=title,
      documentation=documentation, auth_level=auth_level)
  return _ApiDecorator(name, version, **api_settings)
class CacheControl(object):
  """HTTP cache-control configuration for an API method.

  A setting pairs one of the directives below with a maximum age for cached
  responses:

    PUBLIC   - clients and proxies may cache responses.
    PRIVATE  - only clients may cache responses.
    NO_CACHE - nothing may cache responses.
  """
  PUBLIC = 'public'
  PRIVATE = 'private'
  NO_CACHE = 'no-cache'
  VALID_VALUES = (PUBLIC, PRIVATE, NO_CACHE)

  def __init__(self, directive=NO_CACHE, max_age_seconds=0):
    """Initializes a CacheControl setting.

    Args:
      directive: string, one of PUBLIC, PRIVATE or NO_CACHE; any other value
        silently falls back to NO_CACHE. (Default: NO_CACHE)
      max_age_seconds: int, maximum age of cached responses. (Default: 0)
    """
    # Unknown directives are coerced to NO_CACHE rather than rejected.
    self.__directive = (directive if directive in self.VALID_VALUES
                        else self.NO_CACHE)
    self.__max_age_seconds = max_age_seconds

  @property
  def directive(self):
    """The cache directive in effect: PUBLIC, PRIVATE, or NO_CACHE."""
    return self.__directive

  @property
  def max_age_seconds(self):
    """Maximum age of cached responses for this method, in seconds."""
    return self.__max_age_seconds
class _MethodInfo(object):
  """Configurable attributes of an API method.

  Consolidates settings from @method decorator and/or any settings that were
  calculating from the ProtoRPC method name, so they only need to be
  calculated once.
  """

  @util.positional(1)
  def __init__(self, name=None, path=None, http_method=None,
               cache_control=None, scopes=None, audiences=None,
               allowed_client_ids=None, auth_level=None):
    """Constructor.

    Args:
      name: string, Name of the method, prepended with <apiname>. to make it
        unique.
      path: string, Path portion of the URL to the method, for RESTful methods.
      http_method: string, HTTP method supported by the method.
      cache_control: CacheControl, Cache settings for the API method.
      scopes: list of string, OAuth2 token must contain one of these scopes.
      audiences: list of string, IdToken must contain one of these audiences.
      allowed_client_ids: list of string, Client IDs allowed to call the method.
      auth_level: enum from AUTH_LEVEL, Frontend auth level for the method.
    """
    self.__name = name
    self.__path = path
    self.__http_method = http_method
    self.__cache_control = cache_control
    self.__scopes = scopes
    self.__audiences = audiences
    self.__allowed_client_ids = allowed_client_ids
    self.__auth_level = auth_level

  def __safe_name(self, method_name):
    """Restrict name to [.a-zA-Z0-9_] with a lowercased first character.

    Args:
      method_name: string, the raw method/resource/API name to sanitize.

    Returns:
      The sanitized name: disallowed characters removed, leading underscores
      stripped, and the first character lowercased.
    """
    # Raw string so that '\.' is an explicit regex escape instead of an
    # invalid Python string escape (a DeprecationWarning/SyntaxError on
    # newer interpreters).
    safe_name = re.sub(r'[^\.a-zA-Z0-9_]', '', method_name)
    # Strip any number of leading underscores.
    safe_name = safe_name.lstrip('_')
    return safe_name[0:1].lower() + safe_name[1:]

  @property
  def name(self):
    """Method name as specified in decorator or derived."""
    return self.__name

  def get_path(self, api_info):
    """Get the path portion of the URL to the method (for RESTful methods).

    Request path can be specified in the method, and it could have a base
    path prepended to it.

    Args:
      api_info: API information for this API, possibly including a base path.
        This is the api_info property on the class that's been annotated for
        this API.

    Returns:
      This method's request path (not including the http://.../_ah/api/ prefix).

    Raises:
      ApiConfigurationError: If the path isn't properly formatted.
    """
    path = self.__path or ''
    if path and path[0] == '/':
      # An absolute method path ignores any API-level base path.
      path = path[1:]
    else:
      if api_info.path:
        path = '%s%s%s' % (api_info.path, '/' if path else '', path)

    # Each braced segment must be a well-formed template variable ('{name}'),
    # not e.g. '{}' or 'x{y}z'.
    for part in path.split('/'):
      if part and '{' in part and '}' in part:
        if re.match('^{[^{}]+}$', part) is None:
          raise ApiConfigurationError('Invalid path segment: %s (part of %s)' %
                                      (part, path))
    return path

  @property
  def http_method(self):
    """HTTP method supported by the method (e.g. GET, POST)."""
    return self.__http_method

  @property
  def cache_control(self):
    """Cache control setting for the API method."""
    return self.__cache_control

  @property
  def scopes(self):
    """List of scopes for the API method."""
    return self.__scopes

  @property
  def audiences(self):
    """List of audiences for the API method."""
    return self.__audiences

  @property
  def allowed_client_ids(self):
    """List of allowed client IDs for the API method."""
    return self.__allowed_client_ids

  @property
  def auth_level(self):
    """Enum from AUTH_LEVEL specifying default frontend auth level."""
    return self.__auth_level

  def method_id(self, api_info):
    """Computed method identifier, e.g. 'apiname.resource.method'.

    Args:
      api_info: API information for this API; its name and optional
        resource_name contribute to the identifier.

    Returns:
      The dotted, sanitized method identifier.
    """
    if api_info.resource_name:
      resource_part = '.%s' % self.__safe_name(api_info.resource_name)
    else:
      resource_part = ''
    return '%s%s.%s' % (self.__safe_name(api_info.name), resource_part,
                        self.__safe_name(self.name))
@util.positional(2)
def method(request_message=message_types.VoidMessage,
           response_message=message_types.VoidMessage,
           name=None,
           path=None,
           http_method='POST',
           cache_control=None,
           scopes=None,
           audiences=None,
           allowed_client_ids=None,
           auth_level=None):
  """Decorate a ProtoRPC Method for use by the framework above.

  This decorator can be used to specify a method name, path, http method,
  cache control, scopes, audiences, client ids and auth_level.

  Sample usage:
    @api_config.method(RequestMessage, ResponseMessage,
                       name='insert', http_method='PUT')
    def greeting_insert(request):
      ...
      return response

  Args:
    request_message: Message type of expected request.
    response_message: Message type of expected response.
    name: string, Name of the method, prepended with <apiname>. to make it
      unique. (Default: python method name)
    path: string, Path portion of the URL to the method, for RESTful methods.
    http_method: string, HTTP method supported by the method. (Default: POST)
    cache_control: CacheControl, Cache settings for the API method.
    scopes: list of string, OAuth2 token must contain one of these scopes.
    audiences: list of string, IdToken must contain one of these audiences.
    allowed_client_ids: list of string, Client IDs allowed to call the method.
      Currently limited to 5. If None, no calls will be allowed.
    auth_level: enum from AUTH_LEVEL, Frontend auth level for the method.

  Returns:
    'apiserving_method_wrapper' function.

  Raises:
    ValueError: if more than 5 allowed_client_ids are specified.
    TypeError: if the request_type or response_type parameters are not
      proper subclasses of messages.Message.
  """
  # Fallback HTTP verb, used only if a caller explicitly passes
  # http_method=None (the keyword default above is already 'POST').
  DEFAULT_HTTP_METHOD = 'POST'

  def check_type(setting, allowed_type, name, allow_none=True):
    """Verify that the setting is of the allowed type or raise TypeError.

    Args:
      setting: The setting to check.
      allowed_type: The allowed type.
      name: Name of the setting, added to the exception.
      allow_none: If set, None is also allowed.

    Raises:
      TypeError: if setting is not of the allowed type.

    Returns:
      The setting, for convenient use in assignment.
    """
    if (setting is None and allow_none or
        isinstance(setting, allowed_type)):
      return setting
    raise TypeError('%s is not of type %s' % (name, allowed_type.__name__))

  def apiserving_method_decorator(api_method):
    """Decorator for ProtoRPC method that configures Google's API server.

    Args:
      api_method: Original method being wrapped.

    Returns:
      Function responsible for actual invocation.
      Assigns the following attributes to invocation function:
        remote: Instance of RemoteInfo, contains remote method information.
        remote.request_type: Expected request type for remote method.
        remote.response_type: Response type returned from remote method.
        method_info: Instance of _MethodInfo, api method configuration.
      It is also assigned attributes corresponding to the aforementioned kwargs.

    Raises:
      TypeError: if the request_type or response_type parameters are not
        proper subclasses of messages.Message.
      KeyError: if the request_message is a ResourceContainer and the newly
        created remote method has been reference by the container before. This
        should never occur because a remote method is created once.
    """
    # A ResourceContainer carries path/query parameters alongside the body;
    # ProtoRPC only ever sees its combined message class.
    if isinstance(request_message, ResourceContainer):
      remote_decorator = remote.method(request_message.combined_message_class,
                                       response_message)
    else:
      remote_decorator = remote.method(request_message, response_message)
    remote_method = remote_decorator(api_method)

    def invoke_remote(service_instance, request):
      # Propagate any auth info sent with the request before dispatching to
      # the actual implementation.
      users_id_token._maybe_set_current_user_vars(
          invoke_remote, api_info=getattr(service_instance, 'api_info', None),
          request=request)
      return remote_method(service_instance, request)

    invoke_remote.remote = remote_method.remote
    # Remember which container produced this remote method so requests can
    # later be mapped back to their path/query/body parts.
    if isinstance(request_message, ResourceContainer):
      ResourceContainer.add_to_cache(invoke_remote.remote, request_message)

    invoke_remote.method_info = _MethodInfo(
        name=name or api_method.__name__, path=path or api_method.__name__,
        http_method=http_method or DEFAULT_HTTP_METHOD,
        cache_control=cache_control, scopes=scopes, audiences=audiences,
        allowed_client_ids=allowed_client_ids, auth_level=auth_level)
    invoke_remote.__name__ = invoke_remote.method_info.name
    return invoke_remote

  # Validate the decorator arguments eagerly, at decoration time, so that
  # mistakes surface where the decorator is written rather than at first call.
  check_type(cache_control, CacheControl, 'cache_control')
  _CheckListType(scopes, basestring, 'scopes')
  _CheckListType(audiences, basestring, 'audiences')
  _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids')
  _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')
  return apiserving_method_decorator
class ApiConfigGenerator(object):
"""Generates an API configuration from a ProtoRPC service.
Example:
class HelloRequest(messages.Message):
my_name = messages.StringField(1, required=True)
class HelloResponse(messages.Message):
hello = messages.StringField(1, required=True)
class HelloService(remote.Service):
@remote.method(HelloRequest, HelloResponse)
def hello(self, request):
return HelloResponse(hello='Hello there, %s!' %
request.my_name)
api_config = ApiConfigGenerator().pretty_print_config_to_json(HelloService)
The resulting api_config will be a JSON document describing the API
implemented by HelloService.
"""
__NO_BODY = 1
__HAS_BODY = 2
  def __init__(self):
    """Initializes the generator and its per-run caches."""
    # Converts protorpc message classes to JSON Schema; shared by request and
    # response descriptor generation.
    self.__parser = message_parser.MessageTypeToJsonSchema()
    # Maps method id ('api.resource.method') to its request schema id.
    self.__request_schema = {}
    # Maps method id to its response schema id.
    self.__response_schema = {}
    # Maps ProtoRPC method name to the method id computed for it.
    self.__id_from_name = {}
def __get_request_kind(self, method_info):
"""Categorize the type of the request.
Args:
method_info: _MethodInfo, method information.
Returns:
The kind of request.
"""
if method_info.http_method in ('GET', 'DELETE'):
return self.__NO_BODY
else:
return self.__HAS_BODY
def __field_to_subfields(self, field):
"""Fully describes data represented by field, including the nested case.
In the case that the field is not a message field, we have no fields nested
within a message definition, so we can simply return that field. However, in
the nested case, we can't simply describe the data with one field or even
with one chain of fields.
For example, if we have a message field
m_field = messages.MessageField(RefClass, 1)
which references a class with two fields:
class RefClass(messages.Message):
one = messages.StringField(1)
two = messages.IntegerField(2)
then we would need to include both one and two to represent all the
data contained.
Calling __field_to_subfields(m_field) would return:
[
[<MessageField "m_field">, <StringField "one">],
[<MessageField "m_field">, <StringField "two">],
]
If the second field was instead a message field
class RefClass(messages.Message):
one = messages.StringField(1)
two = messages.MessageField(OtherRefClass, 2)
referencing another class with two fields
class OtherRefClass(messages.Message):
three = messages.BooleanField(1)
four = messages.FloatField(2)
then we would need to recurse one level deeper for two.
With this change, calling __field_to_subfields(m_field) would return:
[
[<MessageField "m_field">, <StringField "one">],
[<MessageField "m_field">, <StringField "two">, <StringField "three">],
[<MessageField "m_field">, <StringField "two">, <StringField "four">],
]
Args:
field: An instance of a subclass of messages.Field.
Returns:
A list of lists, where each sublist is a list of fields.
"""
if not isinstance(field, messages.MessageField):
return [[field]]
result = []
for subfield in sorted(field.message_type.all_fields(),
key=lambda f: f.number):
subfield_results = self.__field_to_subfields(subfield)
for subfields_list in subfield_results:
subfields_list.insert(0, field)
result.append(subfields_list)
return result
def __field_to_parameter_type(self, field):
"""Converts the field variant type into a string describing the parameter.
Args:
field: An instance of a subclass of messages.Field.
Returns:
A string corresponding to the variant enum of the field, with a few
exceptions. In the case of signed ints, the 's' is dropped; for the BOOL
variant, 'boolean' is used; and for the ENUM variant, 'string' is used.
Raises:
TypeError: if the field variant is a message variant.
"""
variant = field.variant
if variant == messages.Variant.MESSAGE:
raise TypeError('A message variant can\'t be used in a parameter.')
custom_variant_map = {
messages.Variant.SINT32: 'int32',
messages.Variant.SINT64: 'int64',
messages.Variant.BOOL: 'boolean',
messages.Variant.ENUM: 'string',
}
return custom_variant_map.get(variant) or variant.name.lower()
def __get_path_parameters(self, path):
"""Parses path paremeters from a URI path and organizes them by parameter.
Some of the parameters may correspond to message fields, and so will be
represented as segments corresponding to each subfield; e.g. first.second if
the field "second" in the message field "first" is pulled from the path.
The resulting dictionary uses the first segments as keys and each key has as
value the list of full parameter values with first segment equal to the key.
If the match path parameter is null, that part of the path template is
ignored; this occurs if '{}' is used in a template.
Args:
path: String; a URI path, potentially with some parameters.
Returns:
A dictionary with strings as keys and list of strings as values.
"""
path_parameters_by_segment = {}
for format_var_name in re.findall(_PATH_VARIABLE_PATTERN, path):
first_segment = format_var_name.split('.', 1)[0]
matches = path_parameters_by_segment.setdefault(first_segment, [])
matches.append(format_var_name)
return path_parameters_by_segment
def __validate_simple_subfield(self, parameter, field, segment_list,
_segment_index=0):
"""Verifies that a proposed subfield actually exists and is a simple field.
Here, simple means it is not a MessageField (nested).
Args:
parameter: String; the '.' delimited name of the current field being
considered. This is relative to some root.
field: An instance of a subclass of messages.Field. Corresponds to the
previous segment in the path (previous relative to _segment_index),
since this field should be a message field with the current segment
as a field in the message class.
segment_list: The full list of segments from the '.' delimited subfield
being validated.
_segment_index: Integer; used to hold the position of current segment so
that segment_list can be passed as a reference instead of having to
copy using segment_list[1:] at each step.
Raises:
TypeError: If the final subfield (indicated by _segment_index relative
to the length of segment_list) is a MessageField.
TypeError: If at any stage the lookup at a segment fails, e.g if a.b
exists but a.b.c does not exist. This can happen either if a.b is not
a message field or if a.b.c is not a property on the message class from
a.b.
"""
if _segment_index >= len(segment_list):
if isinstance(field, messages.MessageField):
field_class = field.__class__.__name__
raise TypeError('Can\'t use messages in path. Subfield %r was '
'included but is a %s.' % (parameter, field_class))
return
segment = segment_list[_segment_index]
parameter += '.' + segment
try:
field = field.type.field_by_name(segment)
except (AttributeError, KeyError):
raise TypeError('Subfield %r from path does not exist.' % (parameter,))
self.__validate_simple_subfield(parameter, field, segment_list,
_segment_index=_segment_index + 1)
def __validate_path_parameters(self, field, path_parameters):
"""Verifies that all path parameters correspond to an existing subfield.
Args:
field: An instance of a subclass of messages.Field. Should be the root
level property name in each path parameter in path_parameters. For
example, if the field is called 'foo', then each path parameter should
begin with 'foo.'.
path_parameters: A list of Strings representing URI parameter variables.
Raises:
TypeError: If one of the path parameters does not start with field.name.
"""
for param in path_parameters:
segment_list = param.split('.')
if segment_list[0] != field.name:
raise TypeError('Subfield %r can\'t come from field %r.'
% (param, field.name))
self.__validate_simple_subfield(field.name, field, segment_list[1:])
def __parameter_default(self, final_subfield):
"""Returns default value of final subfield if it has one.
If this subfield comes from a field list returned from __field_to_subfields,
none of the fields in the subfield list can have a default except the final
one since they all must be message fields.
Args:
final_subfield: A simple field from the end of a subfield list.
Returns:
The default value of the subfield, if any exists, with the exception of an
enum field, which will have its value cast to a string.
"""
if final_subfield.default:
if isinstance(final_subfield, messages.EnumField):
return final_subfield.default.name
else:
return final_subfield.default
def __parameter_enum(self, final_subfield):
"""Returns enum descriptor of final subfield if it is an enum.
An enum descriptor is a dictionary with keys as the names from the enum and
each value is a dictionary with a single key "backendValue" and value equal
to the same enum name used to stored it in the descriptor.
The key "description" can also be used next to "backendValue", but protorpc
Enum classes have no way of supporting a description for each value.
Args:
final_subfield: A simple field from the end of a subfield list.
Returns:
The enum descriptor for the field, if it's an enum descriptor, else
returns None.
"""
if isinstance(final_subfield, messages.EnumField):
enum_descriptor = {}
for enum_value in final_subfield.type.to_dict().keys():
enum_descriptor[enum_value] = {'backendValue': enum_value}
return enum_descriptor
def __parameter_descriptor(self, subfield_list):
"""Creates descriptor for a parameter using the subfields that define it.
Each parameter is defined by a list of fields, with all but the last being
a message field and the final being a simple (non-message) field.
Many of the fields in the descriptor are determined solely by the simple
field at the end, though some (such as repeated and required) take the whole
chain of fields into consideration.
Args:
subfield_list: List of fields describing the parameter.
Returns:
Dictionary containing a descriptor for the parameter described by the list
of fields.
"""
descriptor = {}
final_subfield = subfield_list[-1]
if all(subfield.required for subfield in subfield_list):
descriptor['required'] = True
descriptor['type'] = self.__field_to_parameter_type(final_subfield)
default = self.__parameter_default(final_subfield)
if default is not None:
descriptor['default'] = default
if any(subfield.repeated for subfield in subfield_list):
descriptor['repeated'] = True
enum_descriptor = self.__parameter_enum(final_subfield)
if enum_descriptor is not None:
descriptor['enum'] = enum_descriptor
return descriptor
def __add_parameters_from_field(self, field, path_parameters,
params, param_order):
"""Adds all parameters in a field to a method parameters descriptor.
Simple fields will only have one parameter, but a message field 'x' that
corresponds to a message class with fields 'y' and 'z' will result in
parameters 'x.y' and 'x.z', for example. The mapping from field to
parameters is mostly handled by __field_to_subfields.
Args:
field: Field from which parameters will be added to the method descriptor.
path_parameters: A list of parameters matched from a path for this field.
For example for the hypothetical 'x' from above if the path was
'/a/{x.z}/b/{other}' then this list would contain only the element
'x.z' since 'other' does not match to this field.
params: Dictionary with parameter names as keys and parameter descriptors
as values. This will be updated for each parameter in the field.
param_order: List of required parameter names to give them an order in the
descriptor. All required parameters in the field will be added to this
list.
"""
for subfield_list in self.__field_to_subfields(field):
descriptor = self.__parameter_descriptor(subfield_list)
qualified_name = '.'.join(subfield.name for subfield in subfield_list)
in_path = qualified_name in path_parameters
if descriptor.get('required', in_path):
descriptor['required'] = True
param_order.append(qualified_name)
params[qualified_name] = descriptor
def __params_descriptor_without_container(self, message_type,
request_kind, path):
"""Describe parameters of a method which does not use a ResourceContainer.
Makes sure that the path parameters are included in the message definition
and adds any required fields and URL query parameters.
This method is to preserve backwards compatibility and will be removed in
a future release.
Args:
message_type: messages.Message class, Message with parameters to describe.
request_kind: The type of request being made.
path: string, HTTP path to method.
Returns:
A tuple (dict, list of string): Descriptor of the parameters, Order of the
parameters.
"""
params = {}
param_order = []
path_parameter_dict = self.__get_path_parameters(path)
for field in sorted(message_type.all_fields(), key=lambda f: f.number):
matched_path_parameters = path_parameter_dict.get(field.name, [])
self.__validate_path_parameters(field, matched_path_parameters)
if matched_path_parameters or request_kind == self.__NO_BODY:
self.__add_parameters_from_field(field, matched_path_parameters,
params, param_order)
return params, param_order
def __params_descriptor(self, message_type, request_kind, path, method_id):
"""Describe the parameters of a method.
If the message_type is not a ResourceContainer, will fall back to
__params_descriptor_without_container (which will eventually be deprecated).
If the message type is a ResourceContainer, then all path/query parameters
will come from the ResourceContainer. This method will also make sure all
path parameters are covered by the message fields.
Args:
message_type: messages.Message or ResourceContainer class, Message with
parameters to describe.
request_kind: The type of request being made.
path: string, HTTP path to method.
method_id: string, Unique method identifier (e.g. 'myapi.items.method')
Returns:
A tuple (dict, list of string): Descriptor of the parameters, Order of the
parameters.
"""
path_parameter_dict = self.__get_path_parameters(path)
if not isinstance(message_type, ResourceContainer):
if path_parameter_dict:
logging.warning('Method %s specifies path parameters but you are not '
'using a ResourceContainer. This will fail in future '
'releases; please switch to using ResourceContainer as '
'soon as possible.', method_id)
return self.__params_descriptor_without_container(
message_type, request_kind, path)
message_type = message_type.parameters_message_class()
params = {}
param_order = []
for field_name, matched_path_parameters in path_parameter_dict.iteritems():
field = message_type.field_by_name(field_name)
self.__validate_path_parameters(field, matched_path_parameters)
for field in sorted(message_type.all_fields(), key=lambda f: f.number):
matched_path_parameters = path_parameter_dict.get(field.name, [])
self.__add_parameters_from_field(field, matched_path_parameters,
params, param_order)
return params, param_order
def __request_message_descriptor(self, request_kind, message_type, method_id,
path):
"""Describes the parameters and body of the request.
Args:
request_kind: The type of request being made.
message_type: messages.Message or ResourceContainer class. The message to
describe.
method_id: string, Unique method identifier (e.g. 'myapi.items.method')
path: string, HTTP path to method.
Returns:
Dictionary describing the request.
Raises:
ValueError: if the method path and request required fields do not match
"""
descriptor = {}
params, param_order = self.__params_descriptor(message_type, request_kind,
path, method_id)
if isinstance(message_type, ResourceContainer):
message_type = message_type.body_message_class()
if (request_kind == self.__NO_BODY or
message_type == message_types.VoidMessage()):
descriptor['body'] = 'empty'
else:
descriptor['body'] = 'autoTemplate(backendRequest)'
descriptor['bodyName'] = 'resource'
self.__request_schema[method_id] = self.__parser.add_message(
message_type.__class__)
if params:
descriptor['parameters'] = params
if param_order:
descriptor['parameterOrder'] = param_order
return descriptor
def __response_message_descriptor(self, message_type, method_id,
cache_control):
"""Describes the response.
Args:
message_type: messages.Message class, The message to describe.
method_id: string, Unique method identifier (e.g. 'myapi.items.method')
cache_control: CacheControl, Cache settings for the API method.
Returns:
Dictionary describing the response.
"""
descriptor = {}
self.__parser.add_message(message_type.__class__)
if message_type == message_types.VoidMessage():
descriptor['body'] = 'empty'
else:
descriptor['body'] = 'autoTemplate(backendResponse)'
descriptor['bodyName'] = 'resource'
self.__response_schema[method_id] = self.__parser.ref_for_message_type(
message_type.__class__)
if cache_control is not None:
descriptor['cacheControl'] = {
'type': cache_control.directive,
'maxAge': cache_control.max_age_seconds,
}
return descriptor
  def __method_descriptor(self, service, service_name, method_info,
                          protorpc_method_name, protorpc_method_info):
    """Describes a method.

    Args:
      service: endpoints.Service, Implementation of the API as a service.
      service_name: string, Name of the service.
      method_info: _MethodInfo, Configuration for the method.
      protorpc_method_name: string, Name of the method as given in the
        ProtoRPC implementation.
      protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC
        description of the method.

    Returns:
      Dictionary describing the method.
    """
    descriptor = {}
    # The request message may have been registered via a ResourceContainer;
    # if so, recover the original container rather than the combined class.
    request_message_type = ResourceContainer.get_request_message(
        protorpc_method_info.remote)
    request_kind = self.__get_request_kind(method_info)
    remote_method = protorpc_method_info.remote
    descriptor['path'] = method_info.get_path(service.api_info)
    descriptor['httpMethod'] = method_info.http_method
    # rosyMethod identifies the backend handler: '<ServiceName>.<method>'.
    descriptor['rosyMethod'] = '%s.%s' % (service_name, protorpc_method_name)
    descriptor['request'] = self.__request_message_descriptor(
        request_kind, request_message_type,
        method_info.method_id(service.api_info),
        descriptor['path'])
    descriptor['response'] = self.__response_message_descriptor(
        remote_method.response_type(), method_info.method_id(service.api_info),
        method_info.cache_control)
    # For scopes/audiences/clientIds/authLevel, a non-None method-level
    # setting overrides the API-level one; None means inherit from the API.
    scopes = (method_info.scopes
              if method_info.scopes is not None
              else service.api_info.scopes)
    if scopes:
      descriptor['scopes'] = scopes
    audiences = (method_info.audiences
                 if method_info.audiences is not None
                 else service.api_info.audiences)
    if audiences:
      descriptor['audiences'] = audiences
    allowed_client_ids = (method_info.allowed_client_ids
                          if method_info.allowed_client_ids is not None
                          else service.api_info.allowed_client_ids)
    if allowed_client_ids:
      descriptor['clientIds'] = allowed_client_ids
    # The python docstring of the implementation doubles as the description.
    if remote_method.method.__doc__:
      descriptor['description'] = remote_method.method.__doc__
    auth_level = (method_info.auth_level
                  if method_info.auth_level is not None
                  else service.api_info.auth_level)
    if auth_level is not None:
      descriptor['authLevel'] = AUTH_LEVEL.reverse_mapping[auth_level]
    return descriptor
def __schema_descriptor(self, services):
  """Descriptor for the all the JSON Schema used.

  Args:
    services: List of protorpc.remote.Service instances implementing an
      api/version.

  Returns:
    Dictionary containing all the JSON Schema used in the service.
  """
  methods_desc = {}

  for service in services:
    protorpc_methods = service.all_remote_methods()
    for protorpc_method_name in protorpc_methods.iterkeys():
      # __id_from_name was filled in by __api_descriptor for every decorated
      # method, so this lookup is expected to succeed.
      method_id = self.__id_from_name[protorpc_method_name]

      request_response = {}

      # Schema ids were recorded while building the request/response
      # descriptors; a missing entry means that side had no body.
      request_schema_id = self.__request_schema.get(method_id)
      if request_schema_id:
        request_response['request'] = {
            '$ref': request_schema_id
        }

      response_schema_id = self.__response_schema.get(method_id)
      if response_schema_id:
        request_response['response'] = {
            '$ref': response_schema_id
        }

      # Keyed by backend method name, matching the 'rosyMethod' entries.
      rosy_method = '%s.%s' % (service.__name__, protorpc_method_name)
      methods_desc[rosy_method] = request_response

  descriptor = {
      'methods': methods_desc,
      'schemas': self.__parser.schemas(),
  }

  return descriptor
def __get_merged_api_info(self, services):
  """Return the shared _ApiInfo for a set of service classes.

  All classes implementing one api/version must have been produced by the
  same _ApiDecorator, so the first service's api_info is canonical.

  Args:
    services: List of protorpc.remote.Service instances implementing an
      api/version.

  Returns:
    The _ApiInfo object to use for the API that the given services
    implement.

  Raises:
    ApiConfigurationError: If any service was decorated with a different,
      incompatible API descriptor (see the docstring for api()).
  """
  canonical_info = services[0].api_info
  for candidate in services[1:]:
    if not canonical_info.is_same_api(candidate.api_info):
      bad_info = candidate.api_info
      raise ApiConfigurationError(_MULTICLASS_MISMATCH_ERROR_TEMPLATE % (
          bad_info.name, bad_info.version))
  return canonical_info
def __auth_descriptor(self, api_info):
  """Build the 'auth' section of the API descriptor.

  Args:
    api_info: _ApiInfo for the API being described.

  Returns:
    Dict with the configured auth settings, or None when auth is unset.
  """
  auth = api_info.auth
  if auth is None:
    return None

  descriptor = {}
  # Emit only the settings that were explicitly configured.
  if auth.allow_cookie_auth is not None:
    descriptor['allowCookieAuth'] = auth.allow_cookie_auth
  if auth.blocked_regions:
    descriptor['blockedRegions'] = auth.blocked_regions

  return descriptor
def __frontend_limit_descriptor(self, api_info):
  """Build the 'frontendLimits' section of the API descriptor.

  Args:
    api_info: _ApiInfo for the API being described.

  Returns:
    Dict of configured frontend limits, or None when limits are unset.
  """
  limits = api_info.frontend_limits
  if limits is None:
    return None

  descriptor = {}
  # Copy only the aggregate limits that were explicitly configured.
  attr_to_json = (('unregistered_user_qps', 'unregisteredUserQps'),
                  ('unregistered_qps', 'unregisteredQps'),
                  ('unregistered_daily', 'unregisteredDaily'))
  for attr_name, json_name in attr_to_json:
    value = getattr(limits, attr_name)
    if value is not None:
      descriptor[json_name] = value

  rules = self.__frontend_limit_rules_descriptor(api_info)
  if rules:
    descriptor['rules'] = rules

  return descriptor
def __frontend_limit_rules_descriptor(self, api_info):
  """Build the per-rule frontend limit descriptors.

  Args:
    api_info: _ApiInfo for the API being described.

  Returns:
    List of rule dicts (rules with no configured values are dropped), or
    None when no rules are configured.
  """
  if not api_info.frontend_limits.rules:
    return None

  attr_to_json = (('match', 'match'),
                  ('qps', 'qps'),
                  ('user_qps', 'userQps'),
                  ('daily', 'daily'),
                  ('analytics_id', 'analyticsId'))
  descriptors = []
  for rule in api_info.frontend_limits.rules:
    # Only explicitly-set attributes are serialized for each rule.
    entry = {json_name: getattr(rule, attr_name)
             for attr_name, json_name in attr_to_json
             if getattr(rule, attr_name) is not None}
    if entry:
      descriptors.append(entry)

  return descriptors
def __api_descriptor(self, services, hostname=None):
  """Builds a description of an API.

  Args:
    services: List of protorpc.remote.Service instances implementing an
      api/version.
    hostname: string, Hostname of the API, to override the value set on the
      current service. Defaults to None.

  Returns:
    A dictionary that can be deserialized into JSON and stored as an API
    description document.

  Raises:
    ApiConfigurationError: If there's something wrong with the API
      configuration, such as a multiclass API decorated with different API
      descriptors (see the docstring for api()), or a repeated method
      signature.
  """
  merged_api_info = self.__get_merged_api_info(services)
  descriptor = self.get_descriptor_defaults(merged_api_info,
                                            hostname=hostname)

  description = merged_api_info.description
  if not description and len(services) == 1:
    # A single-class API may take its description from the class docstring.
    description = services[0].__doc__
  if description:
    descriptor['description'] = description

  auth_descriptor = self.__auth_descriptor(merged_api_info)
  if auth_descriptor:
    descriptor['auth'] = auth_descriptor

  frontend_limit_descriptor = self.__frontend_limit_descriptor(
      merged_api_info)
  if frontend_limit_descriptor:
    descriptor['frontendLimits'] = frontend_limit_descriptor

  method_map = {}

  # Remember where each method id and each (http_method, path) pair was
  # first seen, so collisions across classes raise a helpful error.
  method_collision_tracker = {}
  rest_collision_tracker = {}

  for service in services:
    remote_methods = service.all_remote_methods()

    for protorpc_meth_name, protorpc_meth_info in remote_methods.iteritems():
      method_info = getattr(protorpc_meth_info, 'method_info', None)
      # Skip methods that are not decorated with @method.
      if method_info is None:
        continue
      method_id = method_info.method_id(service.api_info)
      self.__id_from_name[protorpc_meth_name] = method_id
      method_map[method_id] = self.__method_descriptor(
          service, service.__name__, method_info,
          protorpc_meth_name, protorpc_meth_info)

      # A method id may only be defined once across the whole API.
      if method_id in method_collision_tracker:
        raise ApiConfigurationError(
            'Method %s used multiple times, in classes %s and %s' %
            (method_id, method_collision_tracker[method_id],
             service.__name__))
      else:
        method_collision_tracker[method_id] = service.__name__

      # Likewise, an HTTP verb + path pair may only be claimed once.
      rest_identifier = (method_info.http_method,
                         method_info.get_path(service.api_info))
      if rest_identifier in rest_collision_tracker:
        raise ApiConfigurationError(
            '%s path "%s" used multiple times, in classes %s and %s' %
            (method_info.http_method, method_info.get_path(service.api_info),
             rest_collision_tracker[rest_identifier],
             service.__name__))
      else:
        rest_collision_tracker[rest_identifier] = service.__name__

  if method_map:
    descriptor['methods'] = method_map
    descriptor['descriptor'] = self.__schema_descriptor(services)

  return descriptor
def get_descriptor_defaults(self, api_info, hostname=None):
  """Gets a default configuration for a service.

  Args:
    api_info: _ApiInfo object for this service.
    hostname: string, Hostname of the API, to override the value set on the
      current service. Defaults to None.

  Returns:
    A dictionary with the default configuration.
  """
  hostname = hostname or api_info.hostname
  defaults = {
      'extends': 'thirdParty.api',
      'root': 'https://%s/_ah/api' % hostname,
      'name': api_info.name,
      'version': api_info.version,
      'defaultVersion': True,
      'abstract': False,
      'adapter': {
          'bns': 'https://%s/_ah/spi' % hostname,
          'type': 'lily',
          'deadline': 10.0
      }
  }
  # Optional metadata is emitted only when it was explicitly configured.
  optional_attrs = (('canonical_name', 'canonicalName'),
                    ('owner_domain', 'ownerDomain'),
                    ('owner_name', 'ownerName'),
                    ('package_path', 'packagePath'),
                    ('title', 'title'),
                    ('documentation', 'documentation'))
  for attr_name, json_name in optional_attrs:
    value = getattr(api_info, attr_name)
    if value:
      defaults[json_name] = value
  return defaults
def pretty_print_config_to_json(self, services, hostname=None):
  """Description of a protorpc.remote.Service in API format.

  Args:
    services: Either a single protorpc.remote.Service or a list of them
      that implements an api/version.
    hostname: string, Hostname of the API, to override the value set on the
      current service. Defaults to None.

  Returns:
    string, The API descriptor document as JSON.
  """
  # Normalize a single service to a one-element list.
  service_list = (list(services) if isinstance(services, (tuple, list))
                  else [services])
  _CheckListType(service_list, remote._ServiceClass, 'services',
                 allow_none=False)
  descriptor = self.__api_descriptor(service_list, hostname=hostname)
  return json.dumps(descriptor, sort_keys=True, indent=2)
| 35.331517 | 80 | 0.695488 |
try:
import json
except ImportError:
import simplejson as json
import logging
import re
from endpoints import message_parser
from endpoints import users_id_token
from protorpc import message_types
from protorpc import messages
from protorpc import remote
from protorpc import util
try:
from google.appengine.api import app_identity
except ImportError:
from google.appengine.api import app_identity
__all__ = [
'API_EXPLORER_CLIENT_ID',
'ApiAuth',
'ApiConfigGenerator',
'ApiConfigurationError',
'ApiFrontEndLimitRule',
'ApiFrontEndLimits',
'CacheControl',
'ResourceContainer',
'EMAIL_SCOPE',
'api',
'method',
'AUTH_LEVEL'
]
# OAuth2 client ID of the Google APIs Explorer, allowed by default so the
# Explorer can call any API.
API_EXPLORER_CLIENT_ID = '292824132082.apps.googleusercontent.com'
# OAuth2 scope granting access to the user's email address.
EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
# Matches one {variable} path-template segment; dotted names allowed ({a.b}).
_PATH_VARIABLE_PATTERN = r'{([a-zA-Z_][a-zA-Z_.\d]*)}'
# Error raised when classes of a multi-class API disagree on configuration.
_MULTICLASS_MISMATCH_ERROR_TEMPLATE = (
    'Attempting to implement service %s, version %s, with multiple '
    'classes that aren\'t compatible. See docstring for api() for '
    'examples how to implement a multi-class API.')
def _Enum(docstring, *names):
enums = dict(zip(names, range(len(names))))
reverse = dict((value, key) for key, value in enums.iteritems())
enums['reverse_mapping'] = reverse
enums['__doc__'] = docstring
return type('Enum', (object,), enums)
# This string literal becomes the docstring of the AUTH_LEVEL enum class.
_AUTH_LEVEL_DOCSTRING = """
Define the enums used by the auth_level annotation to specify frontend
authentication requirement.
Frontend authentication is handled by a Google API server prior to the
request reaching backends. An early return before hitting the backend can
happen if the request does not fulfil the requirement specified by the
auth_level.
Valid values of auth_level and their meanings are:
AUTH_LEVEL.REQUIRED: Valid authentication credentials are required. Backend
will be called only if authentication credentials are present and valid.
AUTH_LEVEL.OPTIONAL: Authentication is optional. If authentication credentials
are supplied they must be valid. Backend will be called if the request
contains valid authentication credentials or no authentication credentials.
AUTH_LEVEL.OPTIONAL_CONTINUE: Authentication is optional and will be attempted
if authentication credentials are supplied. Invalid authentication
credentials will be removed but the request can always reach backend.
AUTH_LEVEL.NONE: Frontend authentication will be skipped. If authentication is
desired, it will need to be performed by the backend.
"""

# Frontend authentication levels; see _AUTH_LEVEL_DOCSTRING for semantics.
AUTH_LEVEL = _Enum(_AUTH_LEVEL_DOCSTRING, 'REQUIRED', 'OPTIONAL',
                   'OPTIONAL_CONTINUE', 'NONE')
class ApiConfigurationError(Exception):
  # The class statement had no body in this copy of the file, which is a
  # syntax error in Python; a docstring restores a valid (empty) body.
  """Exception raised when there is an error in the API configuration."""
def _GetFieldAttributes(field):
  """Decompose a ProtoRPC field into constructor (args, kwargs).

  Used by _CopyField and _CompareFields to reconstruct/compare fields.

  Args:
    field: An instance of messages.Field.

  Returns:
    Tuple (positional_args, kwargs) that could be passed to
    field.__class__ to build an equivalent field.

  Raises:
    TypeError: If field is not a messages.Field instance.
  """
  if not isinstance(field, messages.Field):
    raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,))

  positional_args = []
  kwargs = {
      'required': field.required,
      'repeated': field.repeated,
      'variant': field.variant,
      # Read the private default directly, bypassing property validation.
      'default': field._Field__default,
  }

  if isinstance(field, messages.MessageField):
    # Message fields can't have a default.
    kwargs.pop('default')
    if not isinstance(field, message_types.DateTimeField):
      # DateTimeField is a MessageField whose type is implicit.
      positional_args.insert(0, field.message_type)
  elif isinstance(field, messages.EnumField):
    positional_args.insert(0, field.type)

  return positional_args, kwargs
def _CopyField(field, number=None):
  """Clone a ProtoRPC field, optionally giving the copy a new field number.

  Args:
    field: messages.Field instance to copy.
    number: int, Field number for the copy; defaults to the original's.

  Returns:
    A new field of the same class with the same attributes.
  """
  positional, keyword = _GetFieldAttributes(field)
  positional = positional + [number or field.number]
  return field.__class__(*positional, **keyword)
def _CompareFields(field, other_field):
  """Return True when two ProtoRPC fields are interchangeable copies.

  Both the constructor attributes and the concrete field class must match.

  Args:
    field: messages.Field instance.
    other_field: messages.Field instance.

  Returns:
    bool, Whether the two fields are equivalent.
  """
  # Attributes are compared first (and unconditionally), which also
  # type-checks both arguments via _GetFieldAttributes.
  same_attrs = _GetFieldAttributes(field) == _GetFieldAttributes(other_field)
  return same_attrs and field.__class__ == other_field.__class__
class ResourceContainer(object):
  """Pairs a request body message class with URL path/query parameters.

  The body fields and the parameter fields are merged into one message
  class for the underlying ProtoRPC method (combined_message_class).
  """

  # Maps a ProtoRPC remote-method info object to the container that was
  # registered for it (shared across all containers).
  __remote_info_cache = {}

  # Lazily built by the combined_message_class property.
  __combined_message_class = None

  def __init__(self, _body_message_class=message_types.VoidMessage, **kwargs):
    """Initializer.

    Args:
      _body_message_class: messages.Message subclass for the request body.
      **kwargs: messages.Field instances describing path/query parameters.
    """
    self.body_message_class = _body_message_class
    self.parameters_message_class = type('ParameterContainer',
                                         (messages.Message,), kwargs)

  @property
  def combined_message_class(self):
    """Message class merging body fields and parameter fields.

    Fields are renumbered sequentially (body fields first). A field that
    appears in both the body and the parameters must be identical.

    Raises:
      TypeError: If a field appears in both places with differing
        attributes.
    """
    if self.__combined_message_class is not None:
      return self.__combined_message_class

    fields = {}

    field_number = 1
    for field in self.body_message_class.all_fields():
      fields[field.name] = _CopyField(field, number=field_number)
      field_number += 1
    for field in self.parameters_message_class.all_fields():
      if field.name in fields:
        if not _CompareFields(field, fields[field.name]):
          raise TypeError('Field %r contained in both parameters and request '
                          'body, but the fields differ.' % (field.name,))
        else:
          # Identical duplicate: keep the copy already made from the body.
          continue
      fields[field.name] = _CopyField(field, number=field_number)
      field_number += 1

    self.__combined_message_class = type('CombinedContainer',
                                         (messages.Message,), fields)
    return self.__combined_message_class

  @classmethod
  def add_to_cache(cls, remote_info, container):
    """Register *container* as the ResourceContainer for *remote_info*.

    Raises:
      TypeError: If container is not a ResourceContainer instance.
      KeyError: If remote_info is already registered.
    """
    if not isinstance(container, cls):
      raise TypeError('%r not an instance of %r, could not be added to cache.' %
                      (container, cls))
    if remote_info in cls.__remote_info_cache:
      raise KeyError('Cache has collision but should not.')
    cls.__remote_info_cache[remote_info] = container

  @classmethod
  def get_request_message(cls, remote_info):
    """Return the registered container, else the plain request message."""
    if remote_info in cls.__remote_info_cache:
      return cls.__remote_info_cache[remote_info]
    else:
      return remote_info.request_type()
def _CheckListType(settings, allowed_type, name, allow_none=True):
if settings is None:
if not allow_none:
raise TypeError('%s is None, which is not allowed.' % name)
return settings
if not isinstance(settings, (tuple, list)):
raise TypeError('%s is not a list.' % name)
if not all(isinstance(i, allowed_type) for i in settings):
type_list = list(set(type(setting) for setting in settings))
raise TypeError('%s contains types that don\'t match %s: %s' %
(name, allowed_type.__name__, type_list))
return settings
def _CheckType(value, check_type, name, allow_none=True):
if value is None and allow_none:
return
if not isinstance(value, check_type):
raise TypeError('%s type doesn\'t match %s.' % (name, check_type))
def _CheckEnum(value, check_type, name):
if value is None:
return
if value not in check_type.reverse_mapping:
raise TypeError('%s is not a valid value for %s' % (value, name))
class _ApiInfo(object):
  """Configuration for a single class implementing (part of) an API.

  Wraps the API-wide __ApiCommonInfo and adds per-class overrides
  (resource name, path, auth settings). Per-class values of None fall
  back to the API-wide value.
  """

  @util.positional(2)
  def __init__(self, common_info, resource_name=None, path=None, audiences=None,
               scopes=None, allowed_client_ids=None, auth_level=None):
    """Initializer; validates the types of all per-class overrides."""
    _CheckType(resource_name, basestring, 'resource_name')
    _CheckType(path, basestring, 'path')
    _CheckListType(audiences, basestring, 'audiences')
    _CheckListType(scopes, basestring, 'scopes')
    _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids')
    _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')
    self.__common_info = common_info
    self.__resource_name = resource_name
    self.__path = path
    self.__audiences = audiences
    self.__scopes = scopes
    self.__allowed_client_ids = allowed_client_ids
    self.__auth_level = auth_level

  def is_same_api(self, other):
    """True when *other* shares this object's API-wide configuration."""
    if not isinstance(other, _ApiInfo):
      return False
    # Identity comparison: classes of one API share one __ApiCommonInfo.
    return self.__common_info is other.__common_info

  # API-wide values with no per-class override.
  @property
  def name(self):
    return self.__common_info.name

  @property
  def version(self):
    return self.__common_info.version

  @property
  def description(self):
    return self.__common_info.description

  @property
  def hostname(self):
    return self.__common_info.hostname

  # Values below fall back to the API-wide setting when the per-class
  # override is None.
  @property
  def audiences(self):
    if self.__audiences is not None:
      return self.__audiences
    return self.__common_info.audiences

  @property
  def scopes(self):
    if self.__scopes is not None:
      return self.__scopes
    return self.__common_info.scopes

  @property
  def allowed_client_ids(self):
    if self.__allowed_client_ids is not None:
      return self.__allowed_client_ids
    return self.__common_info.allowed_client_ids

  @property
  def auth_level(self):
    if self.__auth_level is not None:
      return self.__auth_level
    return self.__common_info.auth_level

  @property
  def canonical_name(self):
    return self.__common_info.canonical_name

  @property
  def auth(self):
    return self.__common_info.auth

  @property
  def owner_domain(self):
    return self.__common_info.owner_domain

  @property
  def owner_name(self):
    return self.__common_info.owner_name

  @property
  def package_path(self):
    return self.__common_info.package_path

  @property
  def frontend_limits(self):
    return self.__common_info.frontend_limits

  @property
  def title(self):
    return self.__common_info.title

  @property
  def documentation(self):
    return self.__common_info.documentation

  # Purely per-class values (no API-wide fallback exists).
  @property
  def resource_name(self):
    return self.__resource_name

  @property
  def path(self):
    return self.__path
class _ApiDecorator(object):
  """Decorator for single- or multi-class APIs.

  An instance is used directly as a class decorator for a single-class
  API, or its api_class() method produces decorators for the several
  classes of a multi-class API sharing one api/version.
  """

  @util.positional(3)
  def __init__(self, name, version, description=None, hostname=None,
               audiences=None, scopes=None, allowed_client_ids=None,
               canonical_name=None, auth=None, owner_domain=None,
               owner_name=None, package_path=None, frontend_limits=None,
               title=None, documentation=None, auth_level=None):
    """Initializer; see api() for the meaning of the arguments."""
    self.__common_info = self.__ApiCommonInfo(
        name, version, description=description, hostname=hostname,
        audiences=audiences, scopes=scopes,
        allowed_client_ids=allowed_client_ids,
        canonical_name=canonical_name, auth=auth, owner_domain=owner_domain,
        owner_name=owner_name, package_path=package_path,
        frontend_limits=frontend_limits, title=title,
        documentation=documentation, auth_level=auth_level)
    # All classes decorated via this decorator, in decoration order.
    self.__classes = []

  class __ApiCommonInfo(object):
    """API-wide configuration shared by every class of the API.

    Validates all values and applies defaults (hostname, scopes,
    allowed_client_ids, auth_level) exactly once, so every _ApiInfo
    sees identical API-level settings.
    """

    @util.positional(3)
    def __init__(self, name, version, description=None, hostname=None,
                 audiences=None, scopes=None, allowed_client_ids=None,
                 canonical_name=None, auth=None, owner_domain=None,
                 owner_name=None, package_path=None, frontend_limits=None,
                 title=None, documentation=None, auth_level=None):
      """Initializer; validates types and fills in defaults."""
      _CheckType(name, basestring, 'name', allow_none=False)
      _CheckType(version, basestring, 'version', allow_none=False)
      _CheckType(description, basestring, 'description')
      _CheckType(hostname, basestring, 'hostname')
      _CheckListType(audiences, basestring, 'audiences')
      _CheckListType(scopes, basestring, 'scopes')
      _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids')
      _CheckType(canonical_name, basestring, 'canonical_name')
      _CheckType(auth, ApiAuth, 'auth')
      _CheckType(owner_domain, basestring, 'owner_domain')
      _CheckType(owner_name, basestring, 'owner_name')
      _CheckType(package_path, basestring, 'package_path')
      _CheckType(frontend_limits, ApiFrontEndLimits, 'frontend_limits')
      _CheckType(title, basestring, 'title')
      _CheckType(documentation, basestring, 'documentation')
      _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')

      # Defaults: current app hostname, email scope, API Explorer client id,
      # and no frontend authentication.
      if hostname is None:
        hostname = app_identity.get_default_version_hostname()
      if audiences is None:
        audiences = []
      if scopes is None:
        scopes = [EMAIL_SCOPE]
      if allowed_client_ids is None:
        allowed_client_ids = [API_EXPLORER_CLIENT_ID]
      if auth_level is None:
        auth_level = AUTH_LEVEL.NONE

      self.__name = name
      self.__version = version
      self.__description = description
      self.__hostname = hostname
      self.__audiences = audiences
      self.__scopes = scopes
      self.__allowed_client_ids = allowed_client_ids
      self.__canonical_name = canonical_name
      self.__auth = auth
      self.__owner_domain = owner_domain
      self.__owner_name = owner_name
      self.__package_path = package_path
      self.__frontend_limits = frontend_limits
      self.__title = title
      self.__documentation = documentation
      self.__auth_level = auth_level

    @property
    def name(self):
      return self.__name

    @property
    def version(self):
      return self.__version

    @property
    def description(self):
      return self.__description

    @property
    def hostname(self):
      return self.__hostname

    @property
    def audiences(self):
      return self.__audiences

    @property
    def scopes(self):
      return self.__scopes

    @property
    def allowed_client_ids(self):
      return self.__allowed_client_ids

    @property
    def auth_level(self):
      return self.__auth_level

    @property
    def canonical_name(self):
      return self.__canonical_name

    @property
    def auth(self):
      return self.__auth

    @property
    def owner_domain(self):
      return self.__owner_domain

    @property
    def owner_name(self):
      return self.__owner_name

    @property
    def package_path(self):
      return self.__package_path

    @property
    def frontend_limits(self):
      return self.__frontend_limits

    @property
    def title(self):
      return self.__title

    @property
    def documentation(self):
      return self.__documentation

  def __call__(self, service_class):
    """Decorate a single class implementing the whole API."""
    return self.api_class()(service_class)

  def api_class(self, resource_name=None, path=None, audiences=None,
                scopes=None, allowed_client_ids=None, auth_level=None):
    """Get a decorator for one class of a multi-class API.

    Per-class overrides of None fall back to the API-wide settings.
    """
    def apiserving_api_decorator(api_class):
      """Attach api_info to the class and record it on this decorator."""
      self.__classes.append(api_class)
      api_class.api_info = _ApiInfo(
          self.__common_info, resource_name=resource_name,
          path=path, audiences=audiences, scopes=scopes,
          allowed_client_ids=allowed_client_ids, auth_level=auth_level)
      return api_class

    return apiserving_api_decorator

  def get_api_classes(self):
    """Get the list of service classes decorated so far."""
    return self.__classes
class ApiAuth(object):
  """Optional authorization configuration for an API.

  Values are held verbatim and serialized into the API descriptor by
  ApiConfigGenerator.
  """

  def __init__(self, allow_cookie_auth=None, blocked_regions=None):
    """Initializer.

    Args:
      allow_cookie_auth: bool, Whether cookie-based auth is permitted.
      blocked_regions: list of strings, Regions to block.
    """
    _CheckType(allow_cookie_auth, bool, 'allow_cookie_auth')
    _CheckListType(blocked_regions, basestring, 'blocked_regions')
    self._allow_cookie_auth = allow_cookie_auth
    self._blocked_regions = blocked_regions

  @property
  def allow_cookie_auth(self):
    """bool or None: whether cookie-based authentication is allowed."""
    return self._allow_cookie_auth

  @property
  def blocked_regions(self):
    """list of str or None: regions from which requests are blocked."""
    return self._blocked_regions
class ApiFrontEndLimitRule(object):
  """One custom frontend-limit rule; values are forwarded into the
  API descriptor by __frontend_limit_rules_descriptor."""

  def __init__(self, match=None, qps=None, user_qps=None, daily=None,
               analytics_id=None):
    """Initializer.

    Args:
      match: string, Pattern selecting the traffic this rule applies to.
      qps: int, Aggregate queries-per-second limit.
      user_qps: int, Per-user queries-per-second limit.
      daily: int, Aggregate daily request limit.
      analytics_id: string, Opaque analytics identifier (interpreted by
        the frontend, not used locally).
    """
    _CheckType(match, basestring, 'match')
    _CheckType(qps, int, 'qps')
    _CheckType(user_qps, int, 'user_qps')
    _CheckType(daily, int, 'daily')
    _CheckType(analytics_id, basestring, 'analytics_id')
    self.__match = match
    self.__qps = qps
    self.__user_qps = user_qps
    self.__daily = daily
    self.__analytics_id = analytics_id

  @property
  def match(self):
    return self.__match

  @property
  def qps(self):
    return self.__qps

  @property
  def user_qps(self):
    return self.__user_qps

  @property
  def daily(self):
    return self.__daily

  @property
  def analytics_id(self):
    return self.__analytics_id
class ApiFrontEndLimits(object):
  """API-wide traffic limits for unregistered callers, plus custom rules."""

  def __init__(self, unregistered_user_qps=None, unregistered_qps=None,
               unregistered_daily=None, rules=None):
    """Initializer.

    Args:
      unregistered_user_qps: int, Per-user QPS limit for unregistered
        callers.
      unregistered_qps: int, Aggregate QPS limit for unregistered callers.
      unregistered_daily: int, Aggregate daily limit for unregistered
        callers.
      rules: list of ApiFrontEndLimitRule, Custom limit rules.
    """
    _CheckType(unregistered_user_qps, int, 'unregistered_user_qps')
    _CheckType(unregistered_qps, int, 'unregistered_qps')
    _CheckType(unregistered_daily, int, 'unregistered_daily')
    _CheckListType(rules, ApiFrontEndLimitRule, 'rules')
    self.__unregistered_user_qps = unregistered_user_qps
    self.__unregistered_qps = unregistered_qps
    self.__unregistered_daily = unregistered_daily
    self.__rules = rules

  @property
  def unregistered_user_qps(self):
    return self.__unregistered_user_qps

  @property
  def unregistered_qps(self):
    return self.__unregistered_qps

  @property
  def unregistered_daily(self):
    return self.__unregistered_daily

  @property
  def rules(self):
    return self.__rules
@util.positional(2)
def api(name, version, description=None, hostname=None, audiences=None,
        scopes=None, allowed_client_ids=None, canonical_name=None,
        auth=None, owner_domain=None, owner_name=None, package_path=None,
        frontend_limits=None, title=None, documentation=None, auth_level=None):
  """Decorator configuring an API (single-class or multi-class).

  The returned _ApiDecorator can decorate a single class directly, or its
  api_class() method produces decorators for the classes of a multi-class
  API sharing one name/version.

  Args:
    name: string, Name of the API (required, positional).
    version: string, Version of the API (required, positional).
    description, hostname, audiences, scopes, allowed_client_ids,
    canonical_name, auth, owner_domain, owner_name, package_path,
    frontend_limits, title, documentation, auth_level: Optional API-wide
      settings; all are validated by _ApiDecorator.

  Returns:
    An _ApiDecorator instance.
  """
  return _ApiDecorator(name, version, description=description,
                       hostname=hostname, audiences=audiences, scopes=scopes,
                       allowed_client_ids=allowed_client_ids,
                       canonical_name=canonical_name, auth=auth,
                       owner_domain=owner_domain, owner_name=owner_name,
                       package_path=package_path,
                       frontend_limits=frontend_limits, title=title,
                       documentation=documentation, auth_level=auth_level)
class CacheControl(object):
  """Cache-control settings for an API method's response."""

  PUBLIC = 'public'
  PRIVATE = 'private'
  NO_CACHE = 'no-cache'
  VALID_VALUES = (PUBLIC, PRIVATE, NO_CACHE)

  def __init__(self, directive=NO_CACHE, max_age_seconds=0):
    """Initializer.

    Args:
      directive: string, Cache-Control directive; any value outside
        VALID_VALUES silently falls back to NO_CACHE.
      max_age_seconds: int, Maximum age in seconds the response may be
        cached.
    """
    if directive in self.VALID_VALUES:
      self.__directive = directive
    else:
      self.__directive = self.NO_CACHE
    self.__max_age_seconds = max_age_seconds

  @property
  def directive(self):
    """The Cache-Control directive string."""
    return self.__directive

  @property
  def max_age_seconds(self):
    """Maximum response cache age in seconds."""
    return self.__max_age_seconds
class _MethodInfo(object):
  """Configuration captured by the @method decorator for one API method."""

  @util.positional(1)
  def __init__(self, name=None, path=None, http_method=None,
               cache_control=None, scopes=None, audiences=None,
               allowed_client_ids=None, auth_level=None):
    """Initializer; auth-related values of None defer to API-level ones."""
    self.__name = name
    self.__path = path
    self.__http_method = http_method
    self.__cache_control = cache_control
    self.__scopes = scopes
    self.__audiences = audiences
    self.__allowed_client_ids = allowed_client_ids
    self.__auth_level = auth_level

  def __safe_name(self, method_name):
    """Restrict a name to [A-Za-z0-9_.], strip leading '_', lowercase start."""
    # NOTE(review): pattern is not a raw string, but '\.' is still a literal
    # dot inside a character class, so behavior is correct.
    safe_name = re.sub('[^\.a-zA-Z0-9_]', '', method_name)
    safe_name = safe_name.lstrip('_')
    return safe_name[0:1].lower() + safe_name[1:]

  @property
  def name(self):
    """Method name as configured (or derived from the function name)."""
    return self.__name

  def get_path(self, api_info):
    """Get the path portion of the URL for this method.

    A leading '/' makes the method path absolute; otherwise it is prefixed
    with the API-level path, when one is configured.

    Args:
      api_info: _ApiInfo of the API this method belongs to.

    Returns:
      string, The relative URL path.

    Raises:
      ApiConfigurationError: If a path segment mixes literal text with a
        {variable} template.
    """
    path = self.__path or ''
    if path and path[0] == '/':
      # Absolute path: drop the slash and ignore the API-level path.
      path = path[1:]
    else:
      if api_info.path:
        path = '%s%s%s' % (api_info.path, '/' if path else '', path)

    # Each segment containing braces must be exactly one '{name}' template.
    for part in path.split('/'):
      if part and '{' in part and '}' in part:
        if re.match('^{[^{}]+}$', part) is None:
          raise ApiConfigurationError('Invalid path segment: %s (part of %s)' %
                                      (part, path))
    return path

  @property
  def http_method(self):
    return self.__http_method

  @property
  def cache_control(self):
    return self.__cache_control

  @property
  def scopes(self):
    return self.__scopes

  @property
  def audiences(self):
    return self.__audiences

  @property
  def allowed_client_ids(self):
    return self.__allowed_client_ids

  @property
  def auth_level(self):
    return self.__auth_level

  def method_id(self, api_info):
    """Unique method id: '<api>[.<resource>].<method>' (all sanitized).

    Args:
      api_info: _ApiInfo of the API this method belongs to.

    Returns:
      string, The unique method identifier.
    """
    if api_info.resource_name:
      resource_part = '.%s' % self.__safe_name(api_info.resource_name)
    else:
      resource_part = ''
    return '%s%s.%s' % (self.__safe_name(api_info.name), resource_part,
                        self.__safe_name(self.name))
@util.positional(2)
def method(request_message=message_types.VoidMessage,
           response_message=message_types.VoidMessage,
           name=None,
           path=None,
           http_method='POST',
           cache_control=None,
           scopes=None,
           audiences=None,
           allowed_client_ids=None,
           auth_level=None):
  """Decorator marking a service method as an API method.

  Args:
    request_message: Message class or ResourceContainer for the request.
    response_message: Message class for the response.
    name: string, Method name exposed in the API (default: function name).
    path: string, URL path template (default: function name).
    http_method: string, HTTP verb for the method (default 'POST').
    cache_control: CacheControl, Response caching settings.
    scopes: list of strings, Per-method OAuth scopes override.
    audiences: list of strings, Per-method audiences override.
    allowed_client_ids: list of strings, Per-method client-id override.
    auth_level: AUTH_LEVEL value, Per-method frontend auth override.

  Returns:
    A decorator producing an API-enabled remote method.
  """
  DEFAULT_HTTP_METHOD = 'POST'

  def check_type(setting, allowed_type, name, allow_none=True):
    """Local single-value type check (mirrors module-level _CheckType)."""
    if (setting is None and allow_none or
        isinstance(setting, allowed_type)):
      return setting
    raise TypeError('%s is not of type %s' % (name, allowed_type.__name__))

  def apiserving_method_decorator(api_method):
    """Wrap *api_method* as a ProtoRPC remote method with API metadata."""
    if isinstance(request_message, ResourceContainer):
      # ProtoRPC sees the combined (body + parameters) message class.
      remote_decorator = remote.method(request_message.combined_message_class,
                                       response_message)
    else:
      remote_decorator = remote.method(request_message, response_message)
    remote_method = remote_decorator(api_method)

    def invoke_remote(service_instance, request):
      # Populate current-user state before dispatching to the method.
      users_id_token._maybe_set_current_user_vars(
          invoke_remote, api_info=getattr(service_instance, 'api_info', None),
          request=request)
      return remote_method(service_instance, request)

    invoke_remote.remote = remote_method.remote
    if isinstance(request_message, ResourceContainer):
      # Register so descriptor generation can recover the container later.
      ResourceContainer.add_to_cache(invoke_remote.remote, request_message)
    invoke_remote.method_info = _MethodInfo(
        name=name or api_method.__name__, path=path or api_method.__name__,
        http_method=http_method or DEFAULT_HTTP_METHOD,
        cache_control=cache_control, scopes=scopes, audiences=audiences,
        allowed_client_ids=allowed_client_ids, auth_level=auth_level)
    invoke_remote.__name__ = invoke_remote.method_info.name
    return invoke_remote

  # Validate decorator arguments eagerly, at decoration time.
  check_type(cache_control, CacheControl, 'cache_control')
  _CheckListType(scopes, basestring, 'scopes')
  _CheckListType(audiences, basestring, 'audiences')
  _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids')
  _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level')

  return apiserving_method_decorator
class ApiConfigGenerator(object):
  """Generates an API configuration document from ProtoRPC services."""

  # Sentinels classifying whether a request carries an HTTP body.
  __NO_BODY = 1
  __HAS_BODY = 2

  def __init__(self):
    self.__parser = message_parser.MessageTypeToJsonSchema()
    # Maps method id to the request schema id registered for it.
    self.__request_schema = {}
    # Maps method id to the response schema id registered for it.
    self.__response_schema = {}
    # Maps ProtoRPC method name to the full method id.
    self.__id_from_name = {}
def __get_request_kind(self, method_info):
  """Classify the HTTP method: body-less (GET/DELETE) vs. body-carrying.

  Args:
    method_info: _MethodInfo of the method being described.

  Returns:
    __NO_BODY for GET/DELETE, __HAS_BODY otherwise.
  """
  body_less = method_info.http_method in ('GET', 'DELETE')
  return self.__NO_BODY if body_less else self.__HAS_BODY
def __field_to_subfields(self, field):
  """Expand a field into chains of (possibly nested) subfields.

  For a simple field returns [[field]]. For a MessageField, returns one
  list per leaf, each starting with *field* and ending at a simple field,
  e.g. [[field, nested, leaf], ...].

  Args:
    field: messages.Field instance to expand.

  Returns:
    List of lists of fields.
  """
  if not isinstance(field, messages.MessageField):
    return [[field]]

  result = []
  # Recurse in field-number order so output is deterministic.
  for subfield in sorted(field.message_type.all_fields(),
                         key=lambda f: f.number):
    subfield_results = self.__field_to_subfields(subfield)
    for subfields_list in subfield_results:
      subfields_list.insert(0, field)
      result.append(subfields_list)
  return result
def __field_to_parameter_type(self, field):
  """Map a ProtoRPC field variant to a parameter type string.

  Args:
    field: messages.Field instance (must not be a MESSAGE variant).

  Returns:
    string, The parameter type name for the descriptor.

  Raises:
    TypeError: If the field is a MESSAGE variant, which cannot appear as
      a parameter.
  """
  variant = field.variant
  if variant == messages.Variant.MESSAGE:
    raise TypeError('A message variant can\'t be used in a parameter.')

  # Variants whose descriptor name differs from the lowercased enum name.
  custom_variant_map = {
      messages.Variant.SINT32: 'int32',
      messages.Variant.SINT64: 'int64',
      messages.Variant.BOOL: 'boolean',
      messages.Variant.ENUM: 'string',
  }
  return custom_variant_map.get(variant) or variant.name.lower()
def __get_path_parameters(self, path):
  """Group {template} variables in *path* by their first dotted segment.

  E.g. '/a/{x.b}/{x.c}/{y}' -> {'x': ['x.b', 'x.c'], 'y': ['y']}.

  Args:
    path: string, URL path template of the method.

  Returns:
    Dict mapping the root segment of each variable to the full variable
    names sharing that root.
  """
  grouped = {}
  for var_name in re.findall(_PATH_VARIABLE_PATTERN, path):
    root = var_name.partition('.')[0]
    grouped.setdefault(root, []).append(var_name)
  return grouped
def __validate_simple_subfield(self, parameter, field, segment_list,
                               _segment_index=0):
  """Recursively verify a dotted path parameter resolves to a simple field.

  Args:
    parameter: string, The portion of the parameter validated so far.
    field: The field reached by the segments consumed so far.
    segment_list: list of strings, The dotted-name segments to walk.
    _segment_index: int, Index of the next segment to validate
      (internal recursion state).

  Raises:
    TypeError: If the path terminates on a MessageField, or names a
      subfield that does not exist.
  """
  if _segment_index >= len(segment_list):
    # All segments consumed; the terminal field must not be a message.
    if isinstance(field, messages.MessageField):
      field_class = field.__class__.__name__
      raise TypeError('Can\'t use messages in path. Subfield %r was '
                      'included but is a %s.' % (parameter, field_class))
    return

  segment = segment_list[_segment_index]
  parameter += '.' + segment
  try:
    field = field.type.field_by_name(segment)
  except (AttributeError, KeyError):
    # AttributeError: current field has no nested type;
    # KeyError: the nested type has no field with this name.
    raise TypeError('Subfield %r from path does not exist.' % (parameter,))

  self.__validate_simple_subfield(parameter, field, segment_list,
                                  _segment_index=_segment_index + 1)
def __validate_path_parameters(self, field, path_parameters):
  """Check that each dotted path parameter is rooted at *field*.

  Args:
    field: messages.Field the parameters must belong to.
    path_parameters: list of strings, Dotted parameter names from the
      path template.

  Raises:
    TypeError: If a parameter is not rooted at field.name or does not
      resolve to a simple subfield.
  """
  for parameter in path_parameters:
    segments = parameter.split('.')
    if segments[0] != field.name:
      raise TypeError('Subfield %r can\'t come from field %r.'
                      % (parameter, field.name))
    self.__validate_simple_subfield(field.name, field, segments[1:])
def __parameter_default(self, final_subfield):
  """Return the field's default for the descriptor, or None.

  Enum defaults are serialized by their symbolic name.

  Args:
    final_subfield: The terminal (simple) field of a parameter chain.

  Returns:
    The default value for the descriptor, or None when the field's
    default is falsy.
  """
  default = final_subfield.default
  if not default:
    return None
  if isinstance(final_subfield, messages.EnumField):
    return default.name
  return default
def __parameter_enum(self, final_subfield):
  """Build the enum descriptor for an EnumField parameter.

  Args:
    final_subfield: The terminal (simple) field of a parameter chain.

  Returns:
    Dict mapping each enum member name to {'backendValue': name}, or
    None when the field is not an EnumField.
  """
  if not isinstance(final_subfield, messages.EnumField):
    return None
  return dict((member_name, {'backendValue': member_name})
              for member_name in final_subfield.type.to_dict().keys())
def __parameter_descriptor(self, subfield_list):
  """Build a parameter descriptor from a chain of subfields.

  The chain's last element determines type/default/enum; 'required' is
  set only when every link of the chain is required, and 'repeated' when
  any link is repeated.

  Args:
    subfield_list: list of fields from __field_to_subfields.

  Returns:
    Dictionary describing the parameter.
  """
  descriptor = {}
  final_subfield = subfield_list[-1]

  if all(subfield.required for subfield in subfield_list):
    descriptor['required'] = True

  descriptor['type'] = self.__field_to_parameter_type(final_subfield)

  default = self.__parameter_default(final_subfield)
  if default is not None:
    descriptor['default'] = default

  if any(subfield.repeated for subfield in subfield_list):
    descriptor['repeated'] = True

  enum_descriptor = self.__parameter_enum(final_subfield)
  if enum_descriptor is not None:
    descriptor['enum'] = enum_descriptor

  return descriptor
def __add_parameters_from_field(self, field, path_parameters,
                                params, param_order):
  """Add descriptors for *field* (and all its subfields) to params.

  Mutates *params* (dict keyed by dotted parameter name) and
  *param_order* (names of required parameters, in order of appearance).

  Args:
    field: messages.Field to describe.
    path_parameters: list of strings, Dotted names used in the path.
    params: dict, Output parameter descriptors (mutated).
    param_order: list, Output required-parameter names (mutated).
  """
  for subfield_list in self.__field_to_subfields(field):
    descriptor = self.__parameter_descriptor(subfield_list)
    qualified_name = '.'.join(subfield.name for subfield in subfield_list)

    # Path parameters are implicitly required even if the field is not.
    in_path = qualified_name in path_parameters
    if descriptor.get('required', in_path):
      descriptor['required'] = True
      param_order.append(qualified_name)

    params[qualified_name] = descriptor
def __params_descriptor_without_container(self, message_type,
                                          request_kind, path):
  """Describe parameters directly from a message class (legacy behavior).

  Only fields used in the path — or, for body-less requests, every
  field — become parameters.

  Args:
    message_type: messages.Message class describing the request.
    request_kind: __NO_BODY or __HAS_BODY.
    path: string, URL path template of the method.

  Returns:
    Tuple (params dict, ordered list of required parameter names).
  """
  params = {}
  param_order = []

  path_parameter_dict = self.__get_path_parameters(path)
  # Walk fields in field-number order for deterministic output.
  for field in sorted(message_type.all_fields(), key=lambda f: f.number):
    matched_path_parameters = path_parameter_dict.get(field.name, [])
    self.__validate_path_parameters(field, matched_path_parameters)
    if matched_path_parameters or request_kind == self.__NO_BODY:
      self.__add_parameters_from_field(field, matched_path_parameters,
                                       params, param_order)

  return params, param_order
def __params_descriptor(self, message_type, request_kind, path, method_id):
  """Describe the parameters of a method.

  When message_type is a ResourceContainer, parameters come from its
  dedicated parameters message; otherwise the legacy per-message behavior
  is used (with a deprecation warning when path parameters are present).

  Args:
    message_type: Message class or ResourceContainer of the request.
    request_kind: __NO_BODY or __HAS_BODY.
    path: string, URL path template of the method.
    method_id: string, Unique method identifier (for the warning).

  Returns:
    Tuple (params dict, ordered list of required parameter names).
  """
  path_parameter_dict = self.__get_path_parameters(path)

  if not isinstance(message_type, ResourceContainer):
    if path_parameter_dict:
      logging.warning('Method %s specifies path parameters but you are not '
                      'using a ResourceContainer. This will fail in future '
                      'releases; please switch to using ResourceContainer as '
                      'soon as possible.', method_id)
    return self.__params_descriptor_without_container(
        message_type, request_kind, path)

  # With a container, only the parameters message describes parameters.
  message_type = message_type.parameters_message_class()

  params = {}
  param_order = []

  # Every path parameter must resolve against the parameters message.
  for field_name, matched_path_parameters in path_parameter_dict.iteritems():
    field = message_type.field_by_name(field_name)
    self.__validate_path_parameters(field, matched_path_parameters)

  # Walk fields in field-number order for deterministic output.
  for field in sorted(message_type.all_fields(), key=lambda f: f.number):
    matched_path_parameters = path_parameter_dict.get(field.name, [])
    self.__add_parameters_from_field(field, matched_path_parameters,
                                     params, param_order)

  return params, param_order
  def __request_message_descriptor(self, request_kind, message_type, method_id,
                                   path):
    """Describe the request of a method.

    Args:
      request_kind: Request-kind constant (self.__NO_BODY means
        parameter-only, no request body).
      message_type: ResourceContainer or protorpc request message class.
      method_id: Identifier under which the request schema is registered.
      path: URL path template, used to derive path parameters.

    Returns:
      A dict describing the request: its 'body' disposition, optional
      'bodyName', and any 'parameters'/'parameterOrder' entries.
    """
    descriptor = {}
    params, param_order = self.__params_descriptor(message_type, request_kind,
                                                   path, method_id)
    # For ResourceContainers the body schema comes from the body message
    # class, not the container itself.
    if isinstance(message_type, ResourceContainer):
      message_type = message_type.body_message_class()
    if (request_kind == self.__NO_BODY or
        message_type == message_types.VoidMessage()):
      descriptor['body'] = 'empty'
    else:
      descriptor['body'] = 'autoTemplate(backendRequest)'
      descriptor['bodyName'] = 'resource'
      # Remember the schema id so __schema_descriptor can reference it.
      self.__request_schema[method_id] = self.__parser.add_message(
          message_type.__class__)
    if params:
      descriptor['parameters'] = params
    if param_order:
      descriptor['parameterOrder'] = param_order
    return descriptor
def __response_message_descriptor(self, message_type, method_id,
cache_control):
descriptor = {}
self.__parser.add_message(message_type.__class__)
if message_type == message_types.VoidMessage():
descriptor['body'] = 'empty'
else:
descriptor['body'] = 'autoTemplate(backendResponse)'
descriptor['bodyName'] = 'resource'
self.__response_schema[method_id] = self.__parser.ref_for_message_type(
message_type.__class__)
if cache_control is not None:
descriptor['cacheControl'] = {
'type': cache_control.directive,
'maxAge': cache_control.max_age_seconds,
}
return descriptor
  def __method_descriptor(self, service, service_name, method_info,
                          protorpc_method_name, protorpc_method_info):
    """Describe a single service method for the API configuration.

    Args:
      service: Service class the method belongs to.
      service_name: Name used to build the 'rosyMethod' backend reference.
      method_info: Annotation object holding path/HTTP/auth metadata.
      protorpc_method_name: Name of the protorpc method.
      protorpc_method_info: protorpc method descriptor (carries .remote).

    Returns:
      A dict describing the method: path, httpMethod, rosyMethod, request,
      response, and any auth-related settings.
    """
    descriptor = {}
    request_message_type = ResourceContainer.get_request_message(
        protorpc_method_info.remote)
    request_kind = self.__get_request_kind(method_info)
    remote_method = protorpc_method_info.remote
    descriptor['path'] = method_info.get_path(service.api_info)
    descriptor['httpMethod'] = method_info.http_method
    descriptor['rosyMethod'] = '%s.%s' % (service_name, protorpc_method_name)
    descriptor['request'] = self.__request_message_descriptor(
        request_kind, request_message_type,
        method_info.method_id(service.api_info),
        descriptor['path'])
    descriptor['response'] = self.__response_message_descriptor(
        remote_method.response_type(), method_info.method_id(service.api_info),
        method_info.cache_control)
    # Method-level auth settings override the API-level defaults when set.
    scopes = (method_info.scopes
              if method_info.scopes is not None
              else service.api_info.scopes)
    if scopes:
      descriptor['scopes'] = scopes
    audiences = (method_info.audiences
                 if method_info.audiences is not None
                 else service.api_info.audiences)
    if audiences:
      descriptor['audiences'] = audiences
    allowed_client_ids = (method_info.allowed_client_ids
                          if method_info.allowed_client_ids is not None
                          else service.api_info.allowed_client_ids)
    if allowed_client_ids:
      descriptor['clientIds'] = allowed_client_ids
    # The method's docstring doubles as its public description.
    if remote_method.method.__doc__:
      descriptor['description'] = remote_method.method.__doc__
    auth_level = (method_info.auth_level
                  if method_info.auth_level is not None
                  else service.api_info.auth_level)
    if auth_level is not None:
      descriptor['authLevel'] = AUTH_LEVEL.reverse_mapping[auth_level]
    return descriptor
  def __schema_descriptor(self, services):
    """Describe the schemas section of the API configuration.

    Maps each backend ('rosy') method to the $ref ids of the request and
    response schemas previously registered by the request/response
    descriptor builders, and includes all parsed schemas.

    Args:
      services: Iterable of service classes included in this API.

    Returns:
      A dict with 'methods' (request/response $refs per rosy method) and
      'schemas' (the parser's collected schema definitions).
    """
    methods_desc = {}
    for service in services:
      protorpc_methods = service.all_remote_methods()
      for protorpc_method_name in protorpc_methods.iterkeys():
        # __id_from_name was populated while building method descriptors.
        method_id = self.__id_from_name[protorpc_method_name]
        request_response = {}
        # Schema entries exist only for methods with a non-empty body.
        request_schema_id = self.__request_schema.get(method_id)
        if request_schema_id:
          request_response['request'] = {
              '$ref': request_schema_id
          }
        response_schema_id = self.__response_schema.get(method_id)
        if response_schema_id:
          request_response['response'] = {
              '$ref': response_schema_id
          }
        rosy_method = '%s.%s' % (service.__name__, protorpc_method_name)
        methods_desc[rosy_method] = request_response
    descriptor = {
        'methods': methods_desc,
        'schemas': self.__parser.schemas(),
    }
    return descriptor
def __get_merged_api_info(self, services):
merged_api_info = services[0].api_info
for service in services[1:]:
if not merged_api_info.is_same_api(service.api_info):
raise ApiConfigurationError(_MULTICLASS_MISMATCH_ERROR_TEMPLATE % (
service.api_info.name, service.api_info.version))
return merged_api_info
def __auth_descriptor(self, api_info):
if api_info.auth is None:
return None
auth_descriptor = {}
if api_info.auth.allow_cookie_auth is not None:
auth_descriptor['allowCookieAuth'] = api_info.auth.allow_cookie_auth
if api_info.auth.blocked_regions:
auth_descriptor['blockedRegions'] = api_info.auth.blocked_regions
return auth_descriptor
  def __frontend_limit_descriptor(self, api_info):
    """Describe the frontend limits of the API, or None if none are set.

    Copies each unregistered-traffic quota attribute that is set on
    api_info.frontend_limits into its camelCase descriptor key and
    attaches any per-match rules.
    """
    if api_info.frontend_limits is None:
      return None
    descriptor = {}
    # Map snake_case attributes onto the camelCase descriptor keys.
    for propname, descname in (('unregistered_user_qps', 'unregisteredUserQps'),
                               ('unregistered_qps', 'unregisteredQps'),
                               ('unregistered_daily', 'unregisteredDaily')):
      if getattr(api_info.frontend_limits, propname) is not None:
        descriptor[descname] = getattr(api_info.frontend_limits, propname)
    rules = self.__frontend_limit_rules_descriptor(api_info)
    if rules:
      descriptor['rules'] = rules
    return descriptor
def __frontend_limit_rules_descriptor(self, api_info):
if not api_info.frontend_limits.rules:
return None
rules = []
for rule in api_info.frontend_limits.rules:
descriptor = {}
for propname, descname in (('match', 'match'),
('qps', 'qps'),
('user_qps', 'userQps'),
('daily', 'daily'),
('analytics_id', 'analyticsId')):
if getattr(rule, propname) is not None:
descriptor[descname] = getattr(rule, propname)
if descriptor:
rules.append(descriptor)
return rules
  def __api_descriptor(self, services, hostname=None):
    """Build the complete API descriptor for a set of service classes.

    Args:
      services: List of service classes implementing (parts of) one API.
      hostname: Optional hostname override passed to the defaults builder.

    Returns:
      A dict with the API defaults plus description, auth, frontend
      limits, and per-method descriptors.

    Raises:
      ApiConfigurationError: If two methods share a method id, or two
        methods map to the same (HTTP method, path) pair.
    """
    merged_api_info = self.__get_merged_api_info(services)
    descriptor = self.get_descriptor_defaults(merged_api_info,
                                              hostname=hostname)
    description = merged_api_info.description
    # Fall back to the (single) service's docstring as the description.
    if not description and len(services) == 1:
      description = services[0].__doc__
    if description:
      descriptor['description'] = description
    auth_descriptor = self.__auth_descriptor(merged_api_info)
    if auth_descriptor:
      descriptor['auth'] = auth_descriptor
    frontend_limit_descriptor = self.__frontend_limit_descriptor(
        merged_api_info)
    if frontend_limit_descriptor:
      descriptor['frontendLimits'] = frontend_limit_descriptor
    method_map = {}
    # Track which class defined each method id and each (HTTP method, path)
    # pair so duplicates across classes can be reported with both names.
    method_collision_tracker = {}
    rest_collision_tracker = {}
    for service in services:
      remote_methods = service.all_remote_methods()
      for protorpc_meth_name, protorpc_meth_info in remote_methods.iteritems():
        method_info = getattr(protorpc_meth_info, 'method_info', None)
        # Skip protorpc methods that carry no endpoint method_info.
        if method_info is None:
          continue
        method_id = method_info.method_id(service.api_info)
        self.__id_from_name[protorpc_meth_name] = method_id
        method_map[method_id] = self.__method_descriptor(
            service, service.__name__, method_info,
            protorpc_meth_name, protorpc_meth_info)
        # Reject the same method id appearing in more than one class.
        if method_id in method_collision_tracker:
          raise ApiConfigurationError(
              'Method %s used multiple times, in classes %s and %s' %
              (method_id, method_collision_tracker[method_id],
               service.__name__))
        else:
          method_collision_tracker[method_id] = service.__name__
        # Reject the same (HTTP method, path) pair appearing twice.
        rest_identifier = (method_info.http_method,
                           method_info.get_path(service.api_info))
        if rest_identifier in rest_collision_tracker:
          raise ApiConfigurationError(
              '%s path "%s" used multiple times, in classes %s and %s' %
              (method_info.http_method, method_info.get_path(service.api_info),
               rest_collision_tracker[rest_identifier],
               service.__name__))
        else:
          rest_collision_tracker[rest_identifier] = service.__name__
    if method_map:
      descriptor['methods'] = method_map
      descriptor['descriptor'] = self.__schema_descriptor(services)
    return descriptor
def get_descriptor_defaults(self, api_info, hostname=None):
hostname = hostname or api_info.hostname
defaults = {
'extends': 'thirdParty.api',
'root': 'https://%s/_ah/api' % hostname,
'name': api_info.name,
'version': api_info.version,
'defaultVersion': True,
'abstract': False,
'adapter': {
'bns': 'https://%s/_ah/spi' % hostname,
'type': 'lily',
'deadline': 10.0
}
}
if api_info.canonical_name:
defaults['canonicalName'] = api_info.canonical_name
if api_info.owner_domain:
defaults['ownerDomain'] = api_info.owner_domain
if api_info.owner_name:
defaults['ownerName'] = api_info.owner_name
if api_info.package_path:
defaults['packagePath'] = api_info.package_path
if api_info.title:
defaults['title'] = api_info.title
if api_info.documentation:
defaults['documentation'] = api_info.documentation
return defaults
  def pretty_print_config_to_json(self, services, hostname=None):
    """Return the API configuration for services as pretty-printed JSON.

    Args:
      services: A single service class, or a list/tuple of service classes.
      hostname: Optional hostname override.

    Returns:
      str, the JSON-serialized API descriptor (sorted keys, 2-space
      indent).
    """
    if not isinstance(services, (tuple, list)):
      services = [services]
    # _CheckListType raises on non-service entries; None is not allowed.
    _CheckListType(services, remote._ServiceClass, 'services', allow_none=False)
    descriptor = self.__api_descriptor(services, hostname=hostname)
    return json.dumps(descriptor, sort_keys=True, indent=2)
| true | true |
f7fe186cf8b35974e72fed6ab6a45a191b898763 | 6,549 | py | Python | tensorflow/compiler/tests/xla_test.py | AlexChrisF/udacity | b7f85a74058fc63ccb7601c418450ab934ef5953 | [
"Apache-2.0"
] | 28 | 2017-04-08T09:47:57.000Z | 2020-07-12T03:10:46.000Z | tensorflow/compiler/tests/xla_test.py | AlexChrisF/udacity | b7f85a74058fc63ccb7601c418450ab934ef5953 | [
"Apache-2.0"
] | 7 | 2017-07-13T09:40:59.000Z | 2019-04-08T22:46:51.000Z | tensorflow/compiler/tests/xla_test.py | AlexChrisF/udacity | b7f85a74058fc63ccb7601c418450ab934ef5953 | [
"Apache-2.0"
] | 38 | 2017-04-28T04:15:48.000Z | 2019-09-28T05:11:46.000Z | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definition of XLA test case."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS

# Command-line flags selecting the device and dtypes under test, and an
# optional manifest of tests to skip.
flags.DEFINE_string('test_device', None,
                    'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
                    'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
  """XLA test cases are parameterized test cases.

  The device under test and the set of dtypes to exercise come from the
  --test_device and --types flags; an optional --disabled_manifest file
  lists regexes of test names to skip.
  """

  def __init__(self, method_name='runTest'):
    super(XLATestCase, self).__init__(method_name)
    self.device = FLAGS.test_device
    self.has_custom_call = (self.device == 'XLA_CPU')
    # Parse --types into TF dtypes, plus numpy / integer / float views.
    self.all_tf_types = [
        dtypes.DType(types_pb2.DataType.Value(name))
        for name in FLAGS.types.split(',')
    ]
    self.all_types = [dtype.as_numpy_dtype for dtype in self.all_tf_types]
    self.int_types = [
        dtype.as_numpy_dtype for dtype in self.all_tf_types if dtype.is_integer
    ]
    self.float_types = [
        dtype.as_numpy_dtype for dtype in self.all_tf_types if dtype.is_floating
    ]
    self.numeric_types = self.int_types + self.float_types

    # Parse the manifest file, if any, into a regex identifying tests to
    # disable
    self.disabled_regex = None
    if FLAGS.disabled_manifest is not None:
      comments_re = re.compile('#.*$')
      # Use a context manager so the file is closed even if reading fails.
      with open(FLAGS.disabled_manifest, 'r') as manifest_file:
        lines = manifest_file.read().splitlines()
      lines = [comments_re.sub('', l).strip() for l in lines]
      # Drop empty lines: an empty alternative in the '|'-joined pattern
      # matches every test name and would disable the entire suite.
      lines = [l for l in lines if l]
      if lines:
        self.disabled_regex = re.compile('|'.join(lines))

  def setUp(self):
    """Skips manifest-disabled tests and seeds the RNGs reproducibly."""
    name = '{}.{}'.format(type(self).__name__, self._testMethodName)
    if self.disabled_regex is not None and self.disabled_regex.match(name):
      logging.info('Disabled test case: %s', name)
      self.skipTest('{} is disabled by manifest.'.format(name))
      return
    logging.info('Start test case: %s', name)

    random.seed(random_seed.DEFAULT_GRAPH_SEED)
    np.random.seed(random_seed.DEFAULT_GRAPH_SEED)

  def tearDown(self):
    logging.info('End test case: %s', self._testMethodName)

  @contextlib.contextmanager
  def test_session(self):
    """Custom implementation of test_session() for XLA tests.

    We override the standard Tensorflow test_session() since it is too
    specific to CPU and GPU tests. In particular, we want to disable soft
    placement and explicitly assign ops to devices under test.

    Yields:
      A session to use when running a test case.
    """
    graph = ops.Graph()
    with session.Session(graph=graph) as sess, graph.as_default():
      yield sess

  @contextlib.contextmanager
  def test_scope(self):
    """Test scope that runs tests on a Tensorflow/XLA device.

    Uses a compilation_scope() to mark operators to compile.

    Yields:
      A scope to apply to the operators under test.
    """
    with ops.device('device:{}:0'.format(self.device)):
      yield
def Benchmark(tf_bench,
              builder_fn,
              use_xla_jit,
              device,
              separate_compiled_gradients=False):
  """Build a graph and run benchmarks against it, with or without XLA.

  Args:
    tf_bench: An instance of tf.test.Benchmark, used to run the benchmark.
    builder_fn: A function that builds a graph when invoked, and returns
      (name, fetches), where name is the name of the test, and fetches
      is a list of tensors to fetch as output.
    use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF.
    device: The tensorflow device to run on, e.g. "cpu", "gpu".
    separate_compiled_gradients: If true put each gradient subgraph into a
      separate compilation scope. This gives fine-grained control over which
      portions of the graph will be compiled as a single unit. Compiling
      gradients separately may yield better performance for some graphs.
      The scope is named based on the scope of the forward computation as well
      as the name of the gradients. As a result, the gradients will be compiled
      in a scope that is separate from both the forward computation, and from
      other gradients.
  """
  with ops.Graph().as_default():
    targets = []
    with ops.device(device):
      jit_scope = jit.experimental_jit_scope
      with jit_scope(
          compile_ops=use_xla_jit,
          separate_compiled_gradients=separate_compiled_gradients):
        # builder_fn supplies both the benchmark name and the fetches; the
        # previous dead pre-initializations of name/fetches were removed.
        name, fetches = builder_fn()

      # We only want to benchmark the operations themselves, and not the data
      # transfer of the result(s).  Non-compiled identity ops ensure XLA
      # doesn't know we're dropping the results, otherwise it might compile
      # away the entire computation.
      for fetch in fetches:
        targets.append(array_ops.identity(fetch).op)

    config = config_pb2.ConfigProto(allow_soft_placement=True)
    with session.Session(config=config) as sess:
      sess.run(variables.global_variables_initializer())
      xla = 'xla_' if use_xla_jit else ''
      tf_bench.run_op_benchmark(
          sess, targets, name='%s_%s%s' % (name, xla, device))
| 38.523529 | 80 | 0.706825 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS
flags.DEFINE_string('test_device', None,
'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
def __init__(self, method_name='runTest'):
super(XLATestCase, self).__init__(method_name)
self.device = FLAGS.test_device
self.has_custom_call = (self.device == 'XLA_CPU')
self.all_tf_types = [
dtypes.DType(types_pb2.DataType.Value(name))
for name in FLAGS.types.split(',')
]
self.all_types = [dtype.as_numpy_dtype for dtype in self.all_tf_types]
self.int_types = [
dtype.as_numpy_dtype for dtype in self.all_tf_types if dtype.is_integer
]
self.float_types = [
dtype.as_numpy_dtype for dtype in self.all_tf_types if dtype.is_floating
]
self.numeric_types = self.int_types + self.float_types
self.disabled_regex = None
if FLAGS.disabled_manifest is not None:
comments_re = re.compile('#.*$')
manifest_file = open(FLAGS.disabled_manifest, 'r')
lines = manifest_file.read().splitlines()
lines = [comments_re.sub('', l).strip() for l in lines]
self.disabled_regex = re.compile('|'.join(lines))
manifest_file.close()
def setUp(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
if self.disabled_regex is not None and self.disabled_regex.match(name):
logging.info('Disabled test case: %s', name)
self.skipTest('{} is disabled by manifest.'.format(name))
return
logging.info('Start test case: %s', name)
random.seed(random_seed.DEFAULT_GRAPH_SEED)
np.random.seed(random_seed.DEFAULT_GRAPH_SEED)
def tearDown(self):
logging.info('End test case: %s', self._testMethodName)
@contextlib.contextmanager
def test_session(self):
graph = ops.Graph()
with session.Session(graph=graph) as sess, graph.as_default():
yield sess
@contextlib.contextmanager
def test_scope(self):
with ops.device('device:{}:0'.format(self.device)):
yield
def Benchmark(tf_bench,
builder_fn,
use_xla_jit,
device,
separate_compiled_gradients=False):
with ops.Graph().as_default():
name = None
targets = []
with ops.device(device):
fetches = []
jit_scope = jit.experimental_jit_scope
with jit_scope(
compile_ops=use_xla_jit,
separate_compiled_gradients=separate_compiled_gradients):
name, fetches = builder_fn()
for fetch in fetches:
targets.append(array_ops.identity(fetch).op)
config = config_pb2.ConfigProto(allow_soft_placement=True)
with session.Session(config=config) as sess:
sess.run(variables.global_variables_initializer())
xla = 'xla_' if use_xla_jit else ''
tf_bench.run_op_benchmark(
sess, targets, name='%s_%s%s' % (name, xla, device))
| true | true |
f7fe188ff2e3f3c000edbdaaa53dda2021532221 | 7,810 | py | Python | Parameter_inference_real_data/figures/plot-complex-ap-supplement-compare.py | CardiacModelling/model-reduction-manifold-boundaries | 88ccb24d0ec9d0742a4a93e820fec7fee1a65b61 | [
"BSD-3-Clause"
] | null | null | null | Parameter_inference_real_data/figures/plot-complex-ap-supplement-compare.py | CardiacModelling/model-reduction-manifold-boundaries | 88ccb24d0ec9d0742a4a93e820fec7fee1a65b61 | [
"BSD-3-Clause"
] | null | null | null | Parameter_inference_real_data/figures/plot-complex-ap-supplement-compare.py | CardiacModelling/model-reduction-manifold-boundaries | 88ccb24d0ec9d0742a4a93e820fec7fee1a65b61 | [
"BSD-3-Clause"
] | null | null | null | import myokit
import myokit.pacing as pacing
import numpy as np
import matplotlib
import matplotlib.pyplot as pl
import myokit.lib.markov as markov
import pints
import argparse
import os
import sys
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes, inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
# Load project modules
sys.path.append(os.path.abspath(os.path.join('../', 'python')))
import cells
import data
# Check input arguments
# Command-line options for selecting the cell, model, fitting protocol and
# figure appearance.
parser = argparse.ArgumentParser(
    description='Plot model and experimental data')
parser.add_argument('--cell', type=int, default=2, metavar='N',
                    help='repeat number : 1, 2, 3, 4, 5, 6')
parser.add_argument('--model', type=str, default='wang', metavar='N',
                    help='which model to use')
# NOTE(review): --repeats and --model are parsed but not referenced in the
# plotting code below (the script loops over a fixed model list) — confirm
# before removing.
parser.add_argument('--repeats', type=int, default=25, metavar='N',
                    help='number of CMA-ES runs from different initial guesses')
parser.add_argument('--protocol', type=int, default=1, metavar='N',
                    help='which protocol is used to fit the data: 1 for staircase #1, 2 for sine wave')
parser.add_argument("--show", action='store_true',
                    help="whether to show figures instead of saving them",
                    default=False)
parser.add_argument('--params', type=int, default=1, metavar='N',
                    help='which params to use')
parser.add_argument('--figsize', type=float, nargs='+', default=[9, 7], \
                    help='Figure size in x and y, e.g. --figsize2 2.5 3.5')
parser.add_argument("--grid", action='store_true',
                    help="whether to add grid to figures or not",
                    default=False)
args = parser.parse_args()
cell = args.cell
#
# Simple IKr test script
#
# Get model
# Load the complex-AP voltage protocol and look up the cell's reversal
# potential.
p = myokit.load_protocol('../model-and-protocols/pr6-ap-steps.mmt')
current = 'ikr.IKr'
ek = cells.ek(cell)
print('Reversal potential ' + str(ek) + ' mV')
if args.protocol == 1:
    protocol_str = 'staircase1'
else:
    protocol_str = 'sine-wave'
# Run simulation
dt = 0.1
# Two stacked panels: voltage trace (top, a0) and current traces (bottom, a1).
fig, (a0, a1) = pl.subplots(2, 1, gridspec_kw={'height_ratios': [1, 3]}, figsize=args.figsize, dpi=100 if args.show else 200, constrained_layout=True)
a0.set_xlim([0, 8000])
a0.set_ylim([-140, 80])
a0.set_ylabel( '(mV)' )
if args.grid:
    a0.grid(True)
[label.set_visible(False) for label in a0.get_xticklabels()]
a1.set_xlim([0, 8000])
a1.set_ylim([-12.7, 3.3])
a1.set_xlabel( 'Time (ms)' )
a1.set_ylabel( 'Current (nA)' )
if args.grid:
    a1.grid(True)
# Two inset axes zooming into regions of the current trace.
axins = zoomed_inset_axes(a1, 5, loc='lower left') # zoom-factor: 5
x1, x2, y1, y2 = 3170, 4370, -0.3, 2 # specify the limits
axins.set_xlim(x1, x2) # apply the x-limits
axins.set_ylim(y1, y2) # apply the y-limits
pl.yticks(visible=False)
pl.xticks(visible=False)
mark_inset(a1, axins, loc1=2, loc2=1, fc="none", ec="0.5")
axins2 = inset_axes(a1, 1.75, 3.2, loc='lower right') # zoom-factor: 5
x1, x2, y1, y2 = 6590, 6640, 0, 2.3 # specify the limits
axins2.set_xlim(x1, x2) # apply the x-limits
axins2.set_ylim(y1, y2) # apply the y-limits
pl.yticks(visible=False)
pl.xticks(visible=False)
mark_inset(a1, axins2, loc1=2, loc2=1, fc="none", ec="0.5")
e = myokit.DataLog.load_csv('../data/SFU-data/AP/complex-AP-WT-cell-' + str(cell) + '.csv').npview()
# Apply capacitance filtering for experiment and simulated data
signals = [e.time(), e['current']]
voltage = 'voltage' in e
if voltage:
    signals.append(e['voltage'])
signals = data.capacitance(p, dt, *signals)
e = myokit.DataLog()
e.set_time_key('time')
e['time'] = signals[0]
e['current'] = signals[1] / 1000 # Correct units
if voltage:
    e['voltage'] = signals[2]
# Filtered experimental data
e = e.npview()
# colors = ['orange', 'red']
# Full Wang model plus its successive reductions, plotted together.
models = ['wang', 'wang-r1', 'wang-r2', 'wang-r3', 'wang-r4', 'wang-r5', 'wang-r6', 'wang-r7', 'wang-r8',]
nmodels = len(models)
# Create colormap for plotting
cmap = matplotlib.cm.get_cmap('winter')
norm = matplotlib.colors.Normalize(0, nmodels)
# Simulate each model with its fitted parameters and overlay the traces.
for n, model in enumerate(models):
    m = myokit.load_model('../model-and-protocols/' + model + '-ikr-markov.mmt')
    # Each model variant has its own display name and Markov state list.
    if model == 'mazhari':
        model_str = 'Mazhari'
        states = ['ikr.c2', 'ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'mazhari-reduced':
        model_str = 'Maz-red'
        states = ['ikr.c1', 'ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'wang':
        model_str = 'Wang'
        states = ['ikr.c2', 'ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'wang-r1':
        model_str = 'Wang-r1'
        states = ['ikr.c2', 'ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'wang-r2':
        model_str = 'Wang-r2'
        states = ['ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'wang-r3':
        model_str = 'Wang-r3'
        states = ['ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'wang-r4':
        model_str = 'Wang-r4'
        states = ['ikr.c3', 'ikr.o', 'ikr.i']
    elif model == 'wang-r5':
        model_str = 'Wang-r5'
        states = ['ikr.o', 'ikr.i']
    elif model == 'wang-r6':
        model_str = 'Wang-r6'
        states = ['ikr.o', 'ikr.i']
    elif model == 'wang-r7':
        model_str = 'Wang-r7'
        states = ['ikr.o', 'ikr.i']
    elif model == 'wang-r8':
        model_str = 'Wang-r8'
        states = ['ikr.o']
    else:
        pass
    n_params = int(m.get('misc.n_params').value())
    m = markov.convert_markov_models_to_full_ode_form(m)
    # Set steady state potential
    # LJP is read from the model (presumably the liquid-junction potential
    # correction — confirm against the .mmt files).
    LJP = m.get('misc.LJP').value()
    ss_V = -80 - LJP
    # Load the CMA-ES fitted parameter vector for this model/protocol.
    x_found = np.loadtxt('../cmaesfits/' + model_str + '-model-fit-' + protocol_str + '-iid-noise-parameters-' + str(args.params) + '.txt', unpack=True)
    parameters = []
    for i in range(n_params):
        parameters.append('ikr.p'+str(i+1))
    # 'd' starts as the list of variables to log; s.run() below replaces it
    # with the resulting DataLog.
    d = ['engine.time', 'membrane.V', 'ikr.IKr']
    # Run simulation
    m.get('nernst.EK').set_rhs(ek)
    print('Updating model to steady-state for ' + str(ss_V) + ' mV')
    m.get('membrane.V').set_label('membrane_potential')
    mm = markov.LinearModel.from_component(m.get('ikr'))
    x = mm.steady_state(ss_V, x_found)
    for i in range(len(states)):
        m.get(states[i]).set_state_value(x[i])
    log = data.load_ap_protocol().npview()
    t, v = log['time'], log['voltage']
    m.get('membrane.V').set_rhs('engine.pace - misc.LJP')
    s = myokit.Simulation(m, p)
    s.set_fixed_form_protocol(t, v)
    s.set_tolerance(1e-8, 1e-8)
    s.set_max_step_size(0.1)
    # Update model parameters
    for i in range(n_params):
        s.set_constant('ikr.p'+str(i+1), x_found[i])
    d = s.run(p.characteristic_time(), log_interval=dt, log=d)
    signals2 = [d.time(), d['ikr.IKr'], d['membrane.V']]
    d = myokit.DataLog()
    d.set_time_key('time')
    d['time'] = signals2[0]
    d['current'] = signals2[1]
    d['voltage'] = signals2[2]
    # Filtered simulated data
    d = d.npview()
    # NOTE(review): e is re-assigned every iteration, so regularize(0.1) is
    # applied to the experimental log repeatedly — verify this is idempotent
    # in myokit, or hoist it out of the loop.
    e = e.regularize(0.1)
    d = d.regularize(0.1)
    # Plot the shared voltage trace and experimental current only once.
    if n == 0:
        a0.plot(d.time(), d['voltage'], color='grey')
    if n == 0:
        a1.plot(e.time(), e['current'], color='silver', label='Experiment')
    a1.plot(d.time(), d['current'], label=model_str, color=cmap(norm(n)))
    if n == 0:
        axins.plot(e.time(), e['current'], color='silver', label='Expt.')
        axins2.plot(e.time(), e['current'], color='silver')
        # Index range 65900:66400 corresponds to 6590-6640 ms at the 0.1 ms
        # regularized sampling interval (the axins2 window).
        axins2.axhline(np.max(e['current'][65900:66400]), color='silver', linestyle='--')
    axins.plot(d.time(), d['current'], color=cmap(norm(n)), label=model_str)
    axins2.plot(d.time(), d['current'], color=cmap(norm(n)))
    axins2.axhline(np.max(d['current'][65900:66400]), color=cmap(norm(n)), linestyle='--')
axins.legend(loc='lower right', fontsize=8, ncol=3)
# Either display interactively or save to the shared figures directory.
if args.show == True:
    pl.show()
else:
    filename = 'Complex-AP-all-models-compare-fit-' + protocol_str + '-iid-noise'
    pl.savefig('All_figures/' + filename + '.png')
import myokit.pacing as pacing
import numpy as np
import matplotlib
import matplotlib.pyplot as pl
import myokit.lib.markov as markov
import pints
import argparse
import os
import sys
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes, inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
sys.path.append(os.path.abspath(os.path.join('../', 'python')))
import cells
import data
parser = argparse.ArgumentParser(
description='Plot model and experimental data')
parser.add_argument('--cell', type=int, default=2, metavar='N',
help='repeat number : 1, 2, 3, 4, 5, 6')
parser.add_argument('--model', type=str, default='wang', metavar='N',
help='which model to use')
parser.add_argument('--repeats', type=int, default=25, metavar='N',
help='number of CMA-ES runs from different initial guesses')
parser.add_argument('--protocol', type=int, default=1, metavar='N',
help='which protocol is used to fit the data: 1 for staircase #1, 2 for sine wave')
parser.add_argument("--show", action='store_true',
help="whether to show figures instead of saving them",
default=False)
parser.add_argument('--params', type=int, default=1, metavar='N',
help='which params to use')
parser.add_argument('--figsize', type=float, nargs='+', default=[9, 7], \
help='Figure size in x and y, e.g. --figsize2 2.5 3.5')
parser.add_argument("--grid", action='store_true',
help="whether to add grid to figures or not",
default=False)
args = parser.parse_args()
cell = args.cell
p = myokit.load_protocol('../model-and-protocols/pr6-ap-steps.mmt')
current = 'ikr.IKr'
ek = cells.ek(cell)
print('Reversal potential ' + str(ek) + ' mV')
if args.protocol == 1:
protocol_str = 'staircase1'
else:
protocol_str = 'sine-wave'
dt = 0.1
fig, (a0, a1) = pl.subplots(2, 1, gridspec_kw={'height_ratios': [1, 3]}, figsize=args.figsize, dpi=100 if args.show else 200, constrained_layout=True)
a0.set_xlim([0, 8000])
a0.set_ylim([-140, 80])
a0.set_ylabel( '(mV)' )
if args.grid:
a0.grid(True)
[label.set_visible(False) for label in a0.get_xticklabels()]
a1.set_xlim([0, 8000])
a1.set_ylim([-12.7, 3.3])
a1.set_xlabel( 'Time (ms)' )
a1.set_ylabel( 'Current (nA)' )
if args.grid:
a1.grid(True)
axins = zoomed_inset_axes(a1, 5, loc='lower left')
x1, x2, y1, y2 = 3170, 4370, -0.3, 2
axins.set_xlim(x1, x2)
axins.set_ylim(y1, y2)
pl.yticks(visible=False)
pl.xticks(visible=False)
mark_inset(a1, axins, loc1=2, loc2=1, fc="none", ec="0.5")
axins2 = inset_axes(a1, 1.75, 3.2, loc='lower right')
x1, x2, y1, y2 = 6590, 6640, 0, 2.3
axins2.set_xlim(x1, x2)
axins2.set_ylim(y1, y2)
pl.yticks(visible=False)
pl.xticks(visible=False)
mark_inset(a1, axins2, loc1=2, loc2=1, fc="none", ec="0.5")
e = myokit.DataLog.load_csv('../data/SFU-data/AP/complex-AP-WT-cell-' + str(cell) + '.csv').npview()
signals = [e.time(), e['current']]
voltage = 'voltage' in e
if voltage:
signals.append(e['voltage'])
signals = data.capacitance(p, dt, *signals)
e = myokit.DataLog()
e.set_time_key('time')
e['time'] = signals[0]
e['current'] = signals[1] / 1000
if voltage:
e['voltage'] = signals[2]
e = e.npview()
models = ['wang', 'wang-r1', 'wang-r2', 'wang-r3', 'wang-r4', 'wang-r5', 'wang-r6', 'wang-r7', 'wang-r8',]
nmodels = len(models)
cmap = matplotlib.cm.get_cmap('winter')
norm = matplotlib.colors.Normalize(0, nmodels)
for n, model in enumerate(models):
m = myokit.load_model('../model-and-protocols/' + model + '-ikr-markov.mmt')
if model == 'mazhari':
model_str = 'Mazhari'
states = ['ikr.c2', 'ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'mazhari-reduced':
model_str = 'Maz-red'
states = ['ikr.c1', 'ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'wang':
model_str = 'Wang'
states = ['ikr.c2', 'ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'wang-r1':
model_str = 'Wang-r1'
states = ['ikr.c2', 'ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'wang-r2':
model_str = 'Wang-r2'
states = ['ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'wang-r3':
model_str = 'Wang-r3'
states = ['ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'wang-r4':
model_str = 'Wang-r4'
states = ['ikr.c3', 'ikr.o', 'ikr.i']
elif model == 'wang-r5':
model_str = 'Wang-r5'
states = ['ikr.o', 'ikr.i']
elif model == 'wang-r6':
model_str = 'Wang-r6'
states = ['ikr.o', 'ikr.i']
elif model == 'wang-r7':
model_str = 'Wang-r7'
states = ['ikr.o', 'ikr.i']
elif model == 'wang-r8':
model_str = 'Wang-r8'
states = ['ikr.o']
else:
pass
n_params = int(m.get('misc.n_params').value())
m = markov.convert_markov_models_to_full_ode_form(m)
LJP = m.get('misc.LJP').value()
ss_V = -80 - LJP
x_found = np.loadtxt('../cmaesfits/' + model_str + '-model-fit-' + protocol_str + '-iid-noise-parameters-' + str(args.params) + '.txt', unpack=True)
parameters = []
for i in range(n_params):
parameters.append('ikr.p'+str(i+1))
d = ['engine.time', 'membrane.V', 'ikr.IKr']
m.get('nernst.EK').set_rhs(ek)
print('Updating model to steady-state for ' + str(ss_V) + ' mV')
m.get('membrane.V').set_label('membrane_potential')
mm = markov.LinearModel.from_component(m.get('ikr'))
x = mm.steady_state(ss_V, x_found)
for i in range(len(states)):
m.get(states[i]).set_state_value(x[i])
log = data.load_ap_protocol().npview()
t, v = log['time'], log['voltage']
m.get('membrane.V').set_rhs('engine.pace - misc.LJP')
s = myokit.Simulation(m, p)
s.set_fixed_form_protocol(t, v)
s.set_tolerance(1e-8, 1e-8)
s.set_max_step_size(0.1)
for i in range(n_params):
s.set_constant('ikr.p'+str(i+1), x_found[i])
d = s.run(p.characteristic_time(), log_interval=dt, log=d)
signals2 = [d.time(), d['ikr.IKr'], d['membrane.V']]
d = myokit.DataLog()
d.set_time_key('time')
d['time'] = signals2[0]
d['current'] = signals2[1]
d['voltage'] = signals2[2]
d = d.npview()
e = e.regularize(0.1)
d = d.regularize(0.1)
if n == 0:
a0.plot(d.time(), d['voltage'], color='grey')
if n == 0:
a1.plot(e.time(), e['current'], color='silver', label='Experiment')
a1.plot(d.time(), d['current'], label=model_str, color=cmap(norm(n)))
if n == 0:
axins.plot(e.time(), e['current'], color='silver', label='Expt.')
axins2.plot(e.time(), e['current'], color='silver')
axins2.axhline(np.max(e['current'][65900:66400]), color='silver', linestyle='--')
axins.plot(d.time(), d['current'], color=cmap(norm(n)), label=model_str)
axins2.plot(d.time(), d['current'], color=cmap(norm(n)))
axins2.axhline(np.max(d['current'][65900:66400]), color=cmap(norm(n)), linestyle='--')
axins.legend(loc='lower right', fontsize=8, ncol=3)
if args.show == True:
pl.show()
else:
filename = 'Complex-AP-all-models-compare-fit-' + protocol_str + '-iid-noise'
pl.savefig('All_figures/' + filename + '.png')
| true | true |
f7fe190a60165982d955b3a5e671735acdf83472 | 4,858 | py | Python | usage.py | preftech/dash-tabulator | 207ae1ff6f683471cb0a02247ddff32860400210 | [
"MIT"
] | 62 | 2020-07-10T00:40:21.000Z | 2022-03-10T00:11:35.000Z | usage.py | preftech/dash-tabulator | 207ae1ff6f683471cb0a02247ddff32860400210 | [
"MIT"
] | 50 | 2020-07-15T14:27:33.000Z | 2022-02-20T14:08:09.000Z | usage.py | preftech/dash-tabulator | 207ae1ff6f683471cb0a02247ddff32860400210 | [
"MIT"
] | 16 | 2020-07-13T03:02:24.000Z | 2022-02-22T02:22:02.000Z | import dash_tabulator
import dash
from dash.dependencies import Input, Output
import dash_html_components as html
import dash_core_components as dcc
from dash_extensions.javascript import Namespace
#from textwrap import dedent as d
#import json
# External assets: SheetJS provides client-side xlsx export; Bootstrap and
# Font Awesome style the export / clear-filter buttons.
external_scripts = ['https://oss.sheetjs.com/sheetjs/xlsx.full.min.js']
external_stylesheets = [
    'https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css',
    'https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.1/css/all.min.css',
]

app = dash.Dash(__name__, external_scripts=external_scripts, external_stylesheets=external_stylesheets)

styles = {'pre': {'border': 'thin lightgrey solid', 'overflowX': 'scroll'}}

# Client-side callbacks live in an asset JS file that declares
# window.myNamespace.tabulator; Namespace produces references that are
# resolved in the browser (see https://github.com/preftech/dash-tabulator/pull/11).
# The namespace here must match the namespace of the JavaScript asset.
ns = Namespace("myNamespace", "tabulator")

columns = [
    {
        "formatter": "rowSelection",
        "titleFormatter": "rowSelection",
        # only toggle the values of the active (filtered) rows
        "titleFormatterParams": {"rowRange": "active"},
        "hozAlign": "center",
        "headerSort": "false",
    },
    {"title": "Name", "field": "name", "width": 150, "headerFilter": True, "editor": "input"},
    {"title": "Age", "field": "age", "hozAlign": "left", "formatter": "progress", "bottomCalc": ns("ageCalc")},
    {"title": "Favourite Color", "field": "col", "headerFilter": True},
    {"title": "Date Of Birth", "field": "dob", "hozAlign": "center"},
    {"title": "Rating", "field": "rating", "hozAlign": "center", "formatter": "star"},
    {"title": "Passed?", "field": "passed", "hozAlign": "center", "formatter": "tickCross"},
    {"title": "Print", "field": "print", "hozAlign": "center", "formatter": ns("printIcon")},
]

data = [
    {"id": 1, "name": "Oli Bob", "age": "12", "col": "red", "dob": "", "print": "foo"},
    {"id": 2, "name": "Mary May", "age": "1", "col": "blue", "dob": "14/05/1982", "print": "foo"},
    {"id": 3, "name": "Christine Lobowski", "age": "42", "col": "green", "dob": "22/05/1982", "print": "foo"},
    {"id": 4, "name": "Brendon Philips", "age": "125", "col": "orange", "dob": "01/08/1980", "print": "foo"},
    {"id": 5, "name": "Margret Marmajuke", "age": "16", "col": "yellow", "dob": "31/01/1999", "print": "foo"},
    {"id": 6, "name": "Fred Savage", "age": "16", "col": "yellow", "rating": "1", "dob": "31/01/1999", "print": "foo"},
    {"id": 7, "name": "Brie Larson", "age": "30", "col": "blue", "rating": "1", "dob": "31/01/1999", "print": "foo"},
]

# Tabulator behaviour plus styling for the two helper buttons.
options = {"groupBy": "col", "selectable": "true", "columnResized": ns("columnResized")}
downloadButtonType = {"css": "btn btn-primary", "text": "Export", "type": "xlsx"}
clearFilterButtonType = {"css": "btn btn-outline-dark", "text": "Clear Filters"}
initialHeaderFilter = [{"field": "col", "value": "blue"}]

app.layout = html.Div(
    [
        dash_tabulator.DashTabulator(
            id='tabulator',
            theme="tabulator",
            options=options,
            downloadButtonType=downloadButtonType,
            clearFilterButtonType=clearFilterButtonType,
        ),
        html.Div(id='output'),
        # One-shot interval: fires once shortly after page load so the
        # `initialize` callback below can populate the table.
        dcc.Interval(
            id='interval-component-iu',
            interval=1 * 10,  # in milliseconds
            n_intervals=0,
            max_intervals=0,
        ),
    ]
)
@app.callback(
    [
        Output('tabulator', 'columns'),
        Output('tabulator', 'data'),
        Output('tabulator', 'initialHeaderFilter'),
    ],
    [Input('interval-component-iu', 'n_intervals')],
)
def initialize(val):
    """Push the static column/data/filter definitions into the table.

    Triggered once by the one-shot dcc.Interval component.
    """
    return (columns, data, initialHeaderFilter)
@app.callback(Output('output', 'children'),
              [Input('tabulator', 'rowClicked'),
               Input('tabulator', 'multiRowsClicked'),
               Input('tabulator', 'cellEdited'),
               Input('tabulator', 'dataChanged'),
               Input('tabulator', 'dataFiltering'),
               Input('tabulator', 'dataFiltered')])
def display_output(row, multiRowsClicked, cell, dataChanged, filters, dataFiltered):
    """Echo the most recent table events.

    Logs every event payload to stdout and returns a summary string that
    Dash renders into the `output` div.
    """
    # str.format already converts its arguments, so the explicit str(...)
    # wrappers in the original were redundant and have been dropped.
    print("row: {}".format(row))
    print("cell: {}".format(cell))
    print("data changed: {}".format(dataChanged))
    print("filters: {}".format(filters))
    print("data filtered: {}".format(dataFiltered))
    return 'You have clicked row {} ; cell {} ; multiRowsClicked {}'.format(row, cell, multiRowsClicked)
if __name__ == '__main__':
    # Start the Dash development server; debug=True enables hot reloading.
    app.run_server(debug=True)
| 47.627451 | 125 | 0.584397 | import dash_tabulator
import dash
from dash.dependencies import Input, Output
import dash_html_components as html
import dash_core_components as dcc
from dash_extensions.javascript import Namespace
external_scripts = ['https://oss.sheetjs.com/sheetjs/xlsx.full.min.js']
external_stylesheets = ['https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css',
'https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.1/css/all.min.css']
app = dash.Dash(__name__, external_scripts=external_scripts, external_stylesheets=external_stylesheets)
styles = {
'pre': {
'border': 'thin lightgrey solid',
'overflowX': 'scroll'
}
}
ns = Namespace("myNamespace", "tabulator")
columns = [
{"formatter":"rowSelection", "titleFormatter":"rowSelection",
"titleFormatterParams": {
"rowRange": "active"
},
"hozAlign":"center", "headerSort":"false"},
{ "title": "Name", "field": "name", "width": 150, "headerFilter":True, "editor":"input"},
{ "title": "Age", "field": "age", "hozAlign": "left", "formatter": "progress", "bottomCalc":ns("ageCalc")},
{ "title": "Favourite Color", "field": "col", "headerFilter":True },
{ "title": "Date Of Birth", "field": "dob", "hozAlign": "center" },
{ "title": "Rating", "field": "rating", "hozAlign": "center", "formatter": "star" },
{ "title": "Passed?", "field": "passed", "hozAlign": "center", "formatter": "tickCross" },
{"title": "Print", "field": "print", "hozAlign": "center", "formatter": ns("printIcon")}
]
data = [
{"id":1, "name":"Oli Bob", "age":"12", "col":"red", "dob":"", "print" :"foo"},
{"id":2, "name":"Mary May", "age":"1", "col":"blue", "dob":"14/05/1982", "print" :"foo"},
{"id":3, "name":"Christine Lobowski", "age":"42", "col":"green", "dob":"22/05/1982", "print" :"foo"},
{"id":4, "name":"Brendon Philips", "age":"125", "col":"orange", "dob":"01/08/1980", "print" :"foo"},
{"id":5, "name":"Margret Marmajuke", "age":"16", "col":"yellow", "dob":"31/01/1999", "print" :"foo"},
{"id":6, "name":"Fred Savage", "age":"16", "col":"yellow", "rating":"1", "dob":"31/01/1999", "print" :"foo"},
{"id":7, "name":"Brie Larson", "age":"30", "col":"blue", "rating":"1", "dob":"31/01/1999", "print" :"foo"},
]
options = { "groupBy": "col", "selectable":"true", "columnResized" : ns("columnResized")}
downloadButtonType = {"css": "btn btn-primary", "text":"Export", "type":"xlsx"}
clearFilterButtonType = {"css": "btn btn-outline-dark", "text":"Clear Filters"}
initialHeaderFilter = [{"field":"col", "value":"blue"}]
app.layout = html.Div([
dash_tabulator.DashTabulator(
id='tabulator',
theme="tabulator",
options=options,
downloadButtonType=downloadButtonType,
clearFilterButtonType=clearFilterButtonType,
),
html.Div(id='output'),
dcc.Interval(
id='interval-component-iu',
interval=1*10,
n_intervals=0,
max_intervals=0
)
])
@app.callback([ Output('tabulator', 'columns'),
Output('tabulator', 'data'),
Output('tabulator', 'initialHeaderFilter')],
[Input('interval-component-iu', 'n_intervals')])
def initialize(val):
return columns, data, initialHeaderFilter
@app.callback(Output('output', 'children'),
[Input('tabulator', 'rowClicked'),
Input('tabulator', 'multiRowsClicked'),
Input('tabulator', 'cellEdited'),
Input('tabulator', 'dataChanged'),
Input('tabulator', 'dataFiltering'),
Input('tabulator', 'dataFiltered')])
def display_output(row, multiRowsClicked, cell, dataChanged, filters, dataFiltered):
print("row: {}".format(str(row)))
print("cell: {}".format(str(cell)))
print("data changed: {}".format(str(dataChanged)))
print("filters: {}".format(str(filters)))
print("data filtered: {}".format(str(dataFiltered)))
return 'You have clicked row {} ; cell {} ; multiRowsClicked {}'.format(row, cell, multiRowsClicked)
if __name__ == '__main__':
app.run_server(debug=True)
| true | true |
f7fe196ff93759113abd6befa98b15fe6d4d2c9d | 485 | py | Python | pymterm/term_pyglet/window.py | stonewell/pymterm | af36656d5f7fb008533178d14b00d83d72ba00cf | [
"MIT"
] | 102 | 2016-07-21T06:39:02.000Z | 2022-03-09T19:34:03.000Z | pymterm/term_pyglet/window.py | stonewell/pymterm | af36656d5f7fb008533178d14b00d83d72ba00cf | [
"MIT"
] | 2 | 2017-01-11T13:43:34.000Z | 2020-01-19T12:06:47.000Z | pymterm/term_pyglet/window.py | stonewell/pymterm | af36656d5f7fb008533178d14b00d83d72ba00cf | [
"MIT"
] | 4 | 2020-03-22T04:08:35.000Z | 2021-06-27T23:38:02.000Z | # coding=utf-8
import logging
import sys
from functools32 import lru_cache
import cap.cap_manager
from session import create_session
from term import TextAttribute, TextMode, reserve
import term.term_keyboard
from term.terminal_gui import TerminalGUI
from term.terminal_widget import TerminalWidget
import window_base
class TermPygletWindow(window_base.TermPygletWindowBase):
    """Pyglet-backed terminal window.

    All rendering and input behaviour lives in TermPygletWindowBase; this
    subclass currently only forwards construction arguments.
    """

    def __init__(self, *args, **kwargs):
        # Old-style super() call -- presumably kept for the Python 2 runtime
        # this project targets (see the functools32 import above); confirm
        # before modernising.
        super(TermPygletWindow, self).__init__(*args, **kwargs)
| 25.526316 | 63 | 0.814433 |
import logging
import sys
from functools32 import lru_cache
import cap.cap_manager
from session import create_session
from term import TextAttribute, TextMode, reserve
import term.term_keyboard
from term.terminal_gui import TerminalGUI
from term.terminal_widget import TerminalWidget
import window_base
class TermPygletWindow(window_base.TermPygletWindowBase):
def __init__(self, *args, **kwargs):
super(TermPygletWindow, self).__init__(*args, **kwargs)
| true | true |
f7fe1a75fe592ba214223dd1f0a925acfe826468 | 5,911 | py | Python | upload_tool/pgy_upload_android.py | FRA7/pgy_upload_tool | 676ed92f2030643dd91f4495f423885e300100c6 | [
"MIT"
] | null | null | null | upload_tool/pgy_upload_android.py | FRA7/pgy_upload_tool | 676ed92f2030643dd91f4495f423885e300100c6 | [
"MIT"
] | null | null | null | upload_tool/pgy_upload_android.py | FRA7/pgy_upload_tool | 676ed92f2030643dd91f4495f423885e300100c6 | [
"MIT"
] | null | null | null | # coding=utf-8
"""
* User: fraj
* Email: fraj@foxmail.com
* Date: 18/2/1
* Time: 10:00
"""
import time
import urllib2
import time
import json
import mimetypes
import os
import smtplib
from email.mime.base import MIMEBase
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email import encoders
import json
# encoding=utf8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
# Pgyer app upload endpoint.
url = 'https://www.pgyer.com/apiv2/app/upload'
# User key issued by pgyer.
uKey = 'AAAAAAAAAAAAAAAAAAAAAA'
# API key issued by pgyer.
_api_key = 'BBBBBBBBBBBBBBBBBBBBB'
# (Optional) install mode: 1 = public, 2 = password protected, 3 = invite only.
# Defaults to 1 (public) when omitted.
buildInstallType = '2'
# (Optional) install password; pass an empty string (or omit) for no password.
buildPassword = '123456'
# Snapshot of the runtime environment variables.
environsDict = os.environ
#print environsDict
# Jenkins build tag for this run.
jenkins_build_number = environsDict['BUILD_TAG']
print jenkins_build_number
# Jenkins build environment: ZHDJ_COMMON = production, ZHDJ_TEST = staging.
sel_product_build = os.getenv('BUILD')
# Distribution channel / product flavor selected for the build.
sel_product_flavors = os.getenv('FLAVORS')
print sel_product_flavors
# SCM changelog collected by Jenkins for this build (may be empty/None).
changelog = os.getenv('SCM_CHANGELOG')
print '*******changelog****'
print changelog
# Locate the APK produced by the build.
def get_apk_file_path():
    """Return the workspace path of the built APK, or None if it is missing.

    The original fell off the end of the function to return None implicitly;
    the explicit return makes that contract obvious to callers (which use the
    None result to detect a missing package).
    """
    # Workspace-relative path of the installer package.
    apk_file_workspace_path = './your_app.apk'
    if os.path.exists(apk_file_workspace_path):
        return apk_file_workspace_path
    return None
# (Disabled) polling loop that waited for the APK to appear:
# while get_apk_file_path() is None:
#     time.sleep(5)
# Path of the APK file to upload (None if the build produced nothing).
apk_file_path = get_apk_file_path()
print apk_file_path
# Encode the request dict as a multipart/form-data body.
def _encode_multipart(params_dict):
    """Encode *params_dict* as a multipart/form-data request body.

    File-like values (anything with a ``read`` method) are embedded as a
    binary part; every other value is sent as a plain form field.

    Returns a ``(body, boundary)`` tuple; the boundary must also be placed
    in the request's Content-Type header.

    Fixed: the original computed ``filename = getattr(v, 'name', '')`` and
    never used it -- the remote filename is deliberately hard-coded to
    ``zhdj.apk`` below, so the dead assignment has been removed.
    """
    # Millisecond timestamp keeps the boundary effectively unique per request.
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in params_dict.items():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            # Binary file part. NOTE: the remote filename is hard-coded.
            content = v.read()
            decoded_content = content.decode('ISO-8859-1')
            data.append('Content-Disposition: form-data; name="%s"; filename="zhdj.apk"' % k)
            data.append('Content-Type: application/octet-stream\r\n')
            data.append(decoded_content)
        else:
            # Plain text field.
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            data.append(v if isinstance(v, str) else v.decode('utf-8'))
    data.append('--%s--\r\n' % boundary)
    return '\r\n'.join(data), boundary
#处理 蒲公英 上传结果
def handle_resule(result):
json_result = json.loads(result)
print '*******上传蒲公英****'
print json_result
if json_result['code'] is 0:
print '*******文件上传成功****'
# print json_result
send_Email(json_result)
# Build and send the notification e-mail.
def send_Email(json_result):
    """Send a notification mail for a successful pgyer upload.

    *json_result* is the decoded pgyer response. The HTML body embeds the
    build metadata and the install QR code, and the APK itself is attached.
    Failures are logged to stdout and swallowed (best-effort delivery).
    """
    print '*******开始发送邮件****'
    buildName = json_result['data']['buildName']
    buildKey = json_result['data']['buildKey']
    buildVersion = json_result['data']['buildVersion']
    buildBuildVersion = json_result['data']['buildBuildVersion']
    buildShortcutUrl = json_result['data']['buildShortcutUrl']
    buildQRCodeURL = json_result['data']['buildQRCodeURL']
    buildUpdated = json_result['data']['buildUpdated']
    # Recipients of the notification mail.
    mail_receiver = ['receiver_one@mail.com','receiver_two@mail.com']
    # SMTP host/user/password -- configure per mail provider.
    mail_host = 'your mail host'
    mail_port = 25
    mail_user = 'your email'
    mail_pwd = 'email password'
    mail_to = ','.join(mail_receiver)
    msg = MIMEMultipart()
    # Assemble the HTML body: build metadata, optional changelog, QR code.
    environsString = '<h3>本次打包相关信息</h3><p>'
    environsString += '<p>应用名称:'+ str(buildName) +'</p>'
    environsString += '<p>版本号:'+ str(buildVersion) +'</p>'
    environsString += '<p>更新时间:'+ str(buildUpdated) +'</p>'
    environsString += '<p>安装密码:'+ str(buildPassword) +'</p>'
    if changelog:
        print "changelog not empty"
        environsString += '<p>变更记录:</p>'
        environsString += '<p>'+ str(changelog) +'</p>'
    else:
        print "changelog empty"
    # environsString += '<p>你可从蒲公英网站在线安装 : ' + 'http://www.pgyer.com/' + str(buildShortcutUrl) + '<p>'
    environsString += '<img src="'+ str(buildQRCodeURL) +'" alt="二维码" />'
    environsString += '<p>扫码直接安装</p>'
    message = environsString
    body = MIMEText(message, _subtype='html', _charset='utf-8')
    # Attach the APK itself.
    part = MIMEBase('application', 'octet-stream') # 'octet-stream': generic binary attachment
    source_path = get_apk_file_path()
    part.set_payload(open(source_path, 'rb').read()) # load the APK bytes into the attachment
    encoders.encode_base64(part)
    nowTime = time.strftime("%Y-%m-%d", time.localtime())
    part_name = 'your_app-' + nowTime + '_'+ sel_product_flavors +'.apk'
    # Python 2: round-trip the attachment name through the local filesystem
    # encoding so non-ASCII flavor names survive.
    part_name = part_name.decode('utf-8').encode(sys.getfilesystemencoding())
    print part_name
    part.add_header('Content-Disposition', 'attachment; filename="' + part_name +'"') # attachment filename header
    msg.attach(body)
    msg.attach(part) # add the attachment to the root container
    msg['To'] = mail_to
    msg['from'] = mail_user
    msg['subject'] = 'Android打包文件: ' + sel_product_build + '-' + sel_product_flavors + ' ' + buildName +' '+ buildVersion
    try:
        s = smtplib.SMTP()
        # Debug mode: dump the SMTP conversation to stdout.
        s.set_debuglevel(1)
        s.connect(mail_host)
        s.login(mail_user, mail_pwd)
        s.sendmail(mail_user, mail_receiver, msg.as_string())
        s.close()
        print '*******邮件发送成功****'
    except Exception, e:
        print e
#############################################################
# Script entry: POST the APK to pgyer, then mail the result.
params = {
    '_api_key': _api_key,
    'file': open(apk_file_path, 'rb'),
    'buildInstallType': buildInstallType,
    'buildPassword': buildPassword
}
coded_params, boundary = _encode_multipart(params)
req = urllib2.Request(url, coded_params.encode('ISO-8859-1'))
req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
try:
    print '*******开始文件上传****'
    resp = urllib2.urlopen(req)
    body = resp.read().decode('utf-8')
    handle_resule(body)
except urllib2.HTTPError as e:
    # On HTTP errors, dump the server's error body for diagnosis.
    print(e.fp.read())
| 28.555556 | 121 | 0.632888 |
"""
* User: fraj
* Email: fraj@foxmail.com
* Date: 18/2/1
* Time: 10:00
"""
import time
import urllib2
import time
import json
import mimetypes
import os
import smtplib
from email.mime.base import MIMEBase
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email import encoders
import json
import sys
reload(sys)
sys.setdefaultencoding('utf8')
url = 'https://www.pgyer.com/apiv2/app/upload'
uKey = 'AAAAAAAAAAAAAAAAAAAAAA'
_api_key = 'BBBBBBBBBBBBBBBBBBBBB'
buildInstallType = '2'
buildPassword = '123456'
environsDict = os.environ
jenkins_build_number = environsDict['BUILD_TAG']
print jenkins_build_number
sel_product_build = os.getenv('BUILD')
sel_product_flavors = os.getenv('FLAVORS')
print sel_product_flavors
changelog = os.getenv('SCM_CHANGELOG')
print '*******changelog****'
print changelog
def get_apk_file_path():
apk_file_workspace_path = './your_app.apk'
if os.path.exists(apk_file_workspace_path):
return apk_file_workspace_path
apk_file_path = get_apk_file_path()
print apk_file_path
def _encode_multipart(params_dict):
boundary = '----------%s' % hex(int(time.time() * 1000))
data = []
for k, v in params_dict.items():
data.append('--%s' % boundary)
if hasattr(v, 'read'):
filename = getattr(v, 'name', '')
content = v.read()
decoded_content = content.decode('ISO-8859-1')
data.append('Content-Disposition: form-data; name="%s"; filename="zhdj.apk"' % k)
data.append('Content-Type: application/octet-stream\r\n')
data.append(decoded_content)
else:
data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
data.append(v if isinstance(v, str) else v.decode('utf-8'))
data.append('--%s--\r\n' % boundary)
return '\r\n'.join(data), boundary
def handle_resule(result):
json_result = json.loads(result)
print '*******上传蒲公英****'
print json_result
if json_result['code'] is 0:
print '*******文件上传成功****'
send_Email(json_result)
def send_Email(json_result):
print '*******开始发送邮件****'
buildName = json_result['data']['buildName']
buildKey = json_result['data']['buildKey']
buildVersion = json_result['data']['buildVersion']
buildBuildVersion = json_result['data']['buildBuildVersion']
buildShortcutUrl = json_result['data']['buildShortcutUrl']
buildQRCodeURL = json_result['data']['buildQRCodeURL']
buildUpdated = json_result['data']['buildUpdated']
mail_receiver = ['receiver_one@mail.com','receiver_two@mail.com']
mail_host = 'your mail host'
mail_port = 25
mail_user = 'your email'
mail_pwd = 'email password'
mail_to = ','.join(mail_receiver)
msg = MIMEMultipart()
environsString = '<h3>本次打包相关信息</h3><p>'
environsString += '<p>应用名称:'+ str(buildName) +'</p>'
environsString += '<p>版本号:'+ str(buildVersion) +'</p>'
environsString += '<p>更新时间:'+ str(buildUpdated) +'</p>'
environsString += '<p>安装密码:'+ str(buildPassword) +'</p>'
if changelog:
print "changelog not empty"
environsString += '<p>变更记录:</p>'
environsString += '<p>'+ str(changelog) +'</p>'
else:
print "changelog empty"
environsString += '<img src="'+ str(buildQRCodeURL) +'" alt="二维码" />'
environsString += '<p>扫码直接安装</p>'
message = environsString
body = MIMEText(message, _subtype='html', _charset='utf-8')
part = MIMEBase('application', 'octet-stream')
source_path = get_apk_file_path()
part.set_payload(open(source_path, 'rb').read())
encoders.encode_base64(part)
nowTime = time.strftime("%Y-%m-%d", time.localtime())
part_name = 'your_app-' + nowTime + '_'+ sel_product_flavors +'.apk'
part_name = part_name.decode('utf-8').encode(sys.getfilesystemencoding())
print part_name
part.add_header('Content-Disposition', 'attachment; filename="' + part_name +'"')
msg.attach(body)
msg.attach(part)
msg['To'] = mail_to
msg['from'] = mail_user
msg['subject'] = 'Android打包文件: ' + sel_product_build + '-' + sel_product_flavors + ' ' + buildName +' '+ buildVersion
try:
s = smtplib.SMTP()
s.set_debuglevel(1)
s.connect(mail_host)
s.login(mail_user, mail_pwd)
s.sendmail(mail_user, mail_receiver, msg.as_string())
s.close()
print '*******邮件发送成功****'
except Exception, e:
print e
| false | true |
f7fe1b7ad6706920ab8b0a28dcc9748b3a7091f9 | 6,359 | py | Python | pysnmp/JUNIPER-LSYSSP-NATPOIPNUM-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/JUNIPER-LSYSSP-NATPOIPNUM-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/JUNIPER-LSYSSP-NATPOIPNUM-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module JUNIPER-LSYSSP-NATPOIPNUM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/JUNIPER-LSYSSP-NATPOIPNUM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:49:05 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint")
jnxLsysSpNATpoipnum, = mibBuilder.importSymbols("JUNIPER-LSYS-SECURITYPROFILE-MIB", "jnxLsysSpNATpoipnum")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ObjectIdentity, Integer32, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Unsigned32, IpAddress, MibIdentifier, Gauge32, Counter64, NotificationType, TimeTicks, ModuleIdentity, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Integer32", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Unsigned32", "IpAddress", "MibIdentifier", "Gauge32", "Counter64", "NotificationType", "TimeTicks", "ModuleIdentity", "Bits")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
jnxLsysSpNATpoipnumMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1))
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMIB.setLastUpdated('201005191644Z')
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMIB.setOrganization('Juniper Networks, Inc.')
jnxLsysSpNATpoipnumObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1))
jnxLsysSpNATpoipnumSummary = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2))
jnxLsysSpNATpoipnumTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1), )
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumTable.setStatus('current')
jnxLsysSpNATpoipnumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1), ).setIndexNames((1, "JUNIPER-LSYSSP-NATPOIPNUM-MIB", "jnxLsysSpNATpoipnumLsysName"))
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumEntry.setStatus('current')
jnxLsysSpNATpoipnumLsysName = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64)))
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumLsysName.setStatus('current')
jnxLsysSpNATpoipnumProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumProfileName.setStatus('current')
jnxLsysSpNATpoipnumUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumUsage.setStatus('current')
jnxLsysSpNATpoipnumReserved = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumReserved.setStatus('current')
jnxLsysSpNATpoipnumMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMaximum.setStatus('current')
jnxLsysSpNATpoipnumUsedAmount = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumUsedAmount.setStatus('current')
jnxLsysSpNATpoipnumMaxQuota = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMaxQuota.setStatus('current')
jnxLsysSpNATpoipnumAvailableAmount = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumAvailableAmount.setStatus('current')
jnxLsysSpNATpoipnumHeaviestUsage = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumHeaviestUsage.setStatus('current')
jnxLsysSpNATpoipnumHeaviestUser = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumHeaviestUser.setStatus('current')
jnxLsysSpNATpoipnumLightestUsage = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumLightestUsage.setStatus('current')
jnxLsysSpNATpoipnumLightestUser = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumLightestUser.setStatus('current')
mibBuilder.exportSymbols("JUNIPER-LSYSSP-NATPOIPNUM-MIB", jnxLsysSpNATpoipnumProfileName=jnxLsysSpNATpoipnumProfileName, jnxLsysSpNATpoipnumTable=jnxLsysSpNATpoipnumTable, jnxLsysSpNATpoipnumEntry=jnxLsysSpNATpoipnumEntry, jnxLsysSpNATpoipnumMIB=jnxLsysSpNATpoipnumMIB, jnxLsysSpNATpoipnumUsage=jnxLsysSpNATpoipnumUsage, jnxLsysSpNATpoipnumMaximum=jnxLsysSpNATpoipnumMaximum, jnxLsysSpNATpoipnumHeaviestUser=jnxLsysSpNATpoipnumHeaviestUser, jnxLsysSpNATpoipnumLightestUser=jnxLsysSpNATpoipnumLightestUser, jnxLsysSpNATpoipnumUsedAmount=jnxLsysSpNATpoipnumUsedAmount, jnxLsysSpNATpoipnumObjects=jnxLsysSpNATpoipnumObjects, jnxLsysSpNATpoipnumReserved=jnxLsysSpNATpoipnumReserved, PYSNMP_MODULE_ID=jnxLsysSpNATpoipnumMIB, jnxLsysSpNATpoipnumLightestUsage=jnxLsysSpNATpoipnumLightestUsage, jnxLsysSpNATpoipnumHeaviestUsage=jnxLsysSpNATpoipnumHeaviestUsage, jnxLsysSpNATpoipnumLsysName=jnxLsysSpNATpoipnumLsysName, jnxLsysSpNATpoipnumMaxQuota=jnxLsysSpNATpoipnumMaxQuota, jnxLsysSpNATpoipnumSummary=jnxLsysSpNATpoipnumSummary, jnxLsysSpNATpoipnumAvailableAmount=jnxLsysSpNATpoipnumAvailableAmount)
| 129.77551 | 1,093 | 0.788331 |
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint")
jnxLsysSpNATpoipnum, = mibBuilder.importSymbols("JUNIPER-LSYS-SECURITYPROFILE-MIB", "jnxLsysSpNATpoipnum")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ObjectIdentity, Integer32, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Unsigned32, IpAddress, MibIdentifier, Gauge32, Counter64, NotificationType, TimeTicks, ModuleIdentity, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Integer32", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Unsigned32", "IpAddress", "MibIdentifier", "Gauge32", "Counter64", "NotificationType", "TimeTicks", "ModuleIdentity", "Bits")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
jnxLsysSpNATpoipnumMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1))
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMIB.setLastUpdated('201005191644Z')
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMIB.setOrganization('Juniper Networks, Inc.')
jnxLsysSpNATpoipnumObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1))
jnxLsysSpNATpoipnumSummary = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2))
jnxLsysSpNATpoipnumTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1), )
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumTable.setStatus('current')
jnxLsysSpNATpoipnumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1), ).setIndexNames((1, "JUNIPER-LSYSSP-NATPOIPNUM-MIB", "jnxLsysSpNATpoipnumLsysName"))
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumEntry.setStatus('current')
jnxLsysSpNATpoipnumLsysName = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64)))
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumLsysName.setStatus('current')
jnxLsysSpNATpoipnumProfileName = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumProfileName.setStatus('current')
jnxLsysSpNATpoipnumUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumUsage.setStatus('current')
jnxLsysSpNATpoipnumReserved = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumReserved.setStatus('current')
jnxLsysSpNATpoipnumMaximum = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 1, 1, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMaximum.setStatus('current')
jnxLsysSpNATpoipnumUsedAmount = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumUsedAmount.setStatus('current')
jnxLsysSpNATpoipnumMaxQuota = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumMaxQuota.setStatus('current')
jnxLsysSpNATpoipnumAvailableAmount = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumAvailableAmount.setStatus('current')
jnxLsysSpNATpoipnumHeaviestUsage = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumHeaviestUsage.setStatus('current')
jnxLsysSpNATpoipnumHeaviestUser = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumHeaviestUser.setStatus('current')
jnxLsysSpNATpoipnumLightestUsage = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumLightestUsage.setStatus('current')
jnxLsysSpNATpoipnumLightestUser = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 39, 1, 17, 16, 1, 2, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxLsysSpNATpoipnumLightestUser.setStatus('current')
mibBuilder.exportSymbols("JUNIPER-LSYSSP-NATPOIPNUM-MIB", jnxLsysSpNATpoipnumProfileName=jnxLsysSpNATpoipnumProfileName, jnxLsysSpNATpoipnumTable=jnxLsysSpNATpoipnumTable, jnxLsysSpNATpoipnumEntry=jnxLsysSpNATpoipnumEntry, jnxLsysSpNATpoipnumMIB=jnxLsysSpNATpoipnumMIB, jnxLsysSpNATpoipnumUsage=jnxLsysSpNATpoipnumUsage, jnxLsysSpNATpoipnumMaximum=jnxLsysSpNATpoipnumMaximum, jnxLsysSpNATpoipnumHeaviestUser=jnxLsysSpNATpoipnumHeaviestUser, jnxLsysSpNATpoipnumLightestUser=jnxLsysSpNATpoipnumLightestUser, jnxLsysSpNATpoipnumUsedAmount=jnxLsysSpNATpoipnumUsedAmount, jnxLsysSpNATpoipnumObjects=jnxLsysSpNATpoipnumObjects, jnxLsysSpNATpoipnumReserved=jnxLsysSpNATpoipnumReserved, PYSNMP_MODULE_ID=jnxLsysSpNATpoipnumMIB, jnxLsysSpNATpoipnumLightestUsage=jnxLsysSpNATpoipnumLightestUsage, jnxLsysSpNATpoipnumHeaviestUsage=jnxLsysSpNATpoipnumHeaviestUsage, jnxLsysSpNATpoipnumLsysName=jnxLsysSpNATpoipnumLsysName, jnxLsysSpNATpoipnumMaxQuota=jnxLsysSpNATpoipnumMaxQuota, jnxLsysSpNATpoipnumSummary=jnxLsysSpNATpoipnumSummary, jnxLsysSpNATpoipnumAvailableAmount=jnxLsysSpNATpoipnumAvailableAmount)
| true | true |
f7fe1bcd6fa84e313ad83eafb8b085f27e7b902a | 391 | py | Python | Summarisation/main_app.py | FreeBirdsCrew/MachineLearning_Scratch | 3b7d877952352017d2879dfc309ac6730096233b | [
"MIT"
] | 1 | 2020-12-11T10:27:16.000Z | 2020-12-11T10:27:16.000Z | Summarisation/main_app.py | FreeBirdsCrew/MachineLearning_Scratch | 3b7d877952352017d2879dfc309ac6730096233b | [
"MIT"
] | null | null | null | Summarisation/main_app.py | FreeBirdsCrew/MachineLearning_Scratch | 3b7d877952352017d2879dfc309ac6730096233b | [
"MIT"
] | null | null | null | from flask import Flask # import flask
app = Flask(__name__) # create an app instance
@app.route("/") # at the end point /
def hello(): # call method hello
return "Hello World!" # which returns "hello world"
if __name__ == "__main__": # on running python app.py
app.run() # run the app | 48.875 | 64 | 0.514066 | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run() | true | true |
f7fe1c3bfd47e0be5b8a22d3eee24b8e2eaf93ba | 11,399 | py | Python | tests/loggers/test_tensorboard.py | xxxhycl2010/pytorch-lightning | 7e18b118449133a5184b9014082ff1fb9818cf9b | [
"Apache-2.0"
] | 2 | 2021-08-24T17:46:10.000Z | 2022-02-19T14:39:29.000Z | tests/loggers/test_tensorboard.py | xxxhycl2010/pytorch-lightning | 7e18b118449133a5184b9014082ff1fb9818cf9b | [
"Apache-2.0"
] | 2 | 2021-07-03T07:07:32.000Z | 2022-03-10T16:07:20.000Z | tests/loggers/test_tensorboard.py | xxxhycl2010/pytorch-lightning | 7e18b118449133a5184b9014082ff1fb9818cf9b | [
"Apache-2.0"
] | 1 | 2021-11-29T11:18:52.000Z | 2021-11-29T11:18:52.000Z | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from argparse import Namespace
from unittest import mock
import pytest
import torch
import yaml
from omegaconf import OmegaConf
from packaging.version import Version
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import TensorBoardLogger
from tests.helpers import BoringModel
from tests.helpers.runif import RunIf
@RunIf(min_torch="1.5.0")
def test_tensorboard_hparams_reload(tmpdir):
    """End-to-end check that hyperparameters survive a one-step fit: they are
    written to hparams.yaml, a checkpoint is produced, and the hparams-plugin
    blob inside the TensorBoard event file matches the expected encoding."""

    class CustomModel(BoringModel):
        # Constructor defaults become the saved hyperparameters.
        def __init__(self, b1=0.5, b2=0.999):
            super().__init__()
            self.save_hyperparameters()

    trainer = Trainer(max_steps=1, default_root_dir=tmpdir)
    model = CustomModel()
    assert trainer.log_dir == trainer.logger.log_dir
    trainer.fit(model)
    assert trainer.log_dir == trainer.logger.log_dir
    folder_path = trainer.log_dir

    # make sure yaml is there
    with open(os.path.join(folder_path, "hparams.yaml")) as file:
        # safe_load converts the YAML scalars back into a plain Python dict
        yaml_params = yaml.safe_load(file)
        assert yaml_params["b1"] == 0.5
        assert yaml_params["b2"] == 0.999
        # exactly the two hyperparameters, nothing else
        assert len(yaml_params.keys()) == 2

    # verify artifacts: a single checkpoint from the single step
    assert len(os.listdir(os.path.join(folder_path, "checkpoints"))) == 1

    # verify tb logs
    event_acc = EventAccumulator(folder_path)
    event_acc.Reload()

    # The serialized hparams-plugin payload changed between torch <1.6 and >=1.6.
    data_pt_1_5 = b'\x12\x1b"\x04\n\x02b1"\x04\n\x02b2*\r\n\x0b\x12\thp_metric'
    data_pt_1_6 = b'\x12\x1f"\x06\n\x02b1 \x03"\x06\n\x02b2 \x03*\r\n\x0b\x12\thp_metric'
    hparams_data = data_pt_1_6 if Version(torch.__version__) >= Version("1.6.0") else data_pt_1_5

    assert event_acc.summary_metadata['_hparams_/experiment'].plugin_data.plugin_name == 'hparams'
    assert event_acc.summary_metadata['_hparams_/experiment'].plugin_data.content == hparams_data
def test_tensorboard_automatic_versioning(tmpdir):
    """With two version dirs already on disk, the next logger picks version 2."""
    root_dir = tmpdir / "tb_versioning"
    root_dir.mkdir()
    for existing in ("version_0", "version_1"):
        (root_dir / existing).mkdir()

    logger = TensorBoardLogger(save_dir=tmpdir, name="tb_versioning")
    assert logger.version == 2
def test_tensorboard_manual_versioning(tmpdir):
    """An explicitly requested version wins over the auto-detected next one."""
    root_dir = tmpdir / "tb_versioning"
    root_dir.mkdir()
    for existing in ("version_0", "version_1", "version_2"):
        (root_dir / existing).mkdir()

    logger = TensorBoardLogger(save_dir=tmpdir, name="tb_versioning", version=1)
    assert logger.version == 1
def test_tensorboard_named_version(tmpdir):
    """A string version such as '2020-02-05-162402' is used as the version dir name."""
    name = "tb_versioning"
    expected_version = "2020-02-05-162402"
    (tmpdir / name).mkdir()

    logger = TensorBoardLogger(save_dir=tmpdir, name=name, version=expected_version)
    # Writing hyperparameters forces the event file (and hence the dir) onto disk.
    logger.log_hyperparams({"a": 1, "b": 2, 123: 3, 3.5: 4, 5j: 5})

    assert logger.version == expected_version
    assert os.listdir(tmpdir / name) == [expected_version]
    assert os.listdir(tmpdir / name / expected_version)
@pytest.mark.parametrize("name", ["", None])
def test_tensorboard_no_name(tmpdir, name):
    """An empty or ``None`` experiment name makes the save dir the root dir."""
    logger = TensorBoardLogger(save_dir=tmpdir, name=name)
    hparams = {"a": 1, "b": 2, 123: 3, 3.5: 4, 5j: 5}
    logger.log_hyperparams(hparams)  # force the event file to be written
    assert logger.root_dir == tmpdir
    assert os.listdir(tmpdir / "version_0")
def test_tensorboard_log_sub_dir(tmpdir):
    """``sub_dir`` is appended to the log dir, and ``~`` / ``$VAR`` in ``save_dir``
    are expanded.

    Fix: the original exported ``test_env_dir`` into ``os.environ`` and never
    removed it, leaking state into every subsequent test; the variable is now
    cleaned up in a ``finally`` block.
    """

    class TestLogger(TensorBoardLogger):
        # fixed name/version for reproducibility
        @property
        def version(self):
            return "version"

        @property
        def name(self):
            return "name"

    trainer_args = dict(
        default_root_dir=tmpdir,
        max_steps=1,
    )

    # no sub_dir specified
    save_dir = tmpdir / "logs"
    logger = TestLogger(save_dir)
    trainer = Trainer(**trainer_args, logger=logger)
    assert trainer.logger.log_dir == os.path.join(save_dir, "name", "version")

    # sub_dir specified
    logger = TestLogger(save_dir, sub_dir="sub_dir")
    trainer = Trainer(**trainer_args, logger=logger)
    assert trainer.logger.log_dir == os.path.join(save_dir, "name", "version", "sub_dir")

    # test home dir (`~`) handling
    save_dir = "~/tmp"
    explicit_save_dir = os.path.expanduser(save_dir)
    logger = TestLogger(save_dir, sub_dir="sub_dir")
    trainer = Trainer(**trainer_args, logger=logger)
    assert trainer.logger.log_dir == os.path.join(explicit_save_dir, "name", "version", "sub_dir")

    # test env var (`$`) handling; restore the environment afterwards
    test_env_dir = "some_directory"
    os.environ["test_env_dir"] = test_env_dir
    try:
        save_dir = "$test_env_dir/tmp"
        explicit_save_dir = f"{test_env_dir}/tmp"
        logger = TestLogger(save_dir, sub_dir="sub_dir")
        trainer = Trainer(**trainer_args, logger=logger)
        assert trainer.logger.log_dir == os.path.join(explicit_save_dir, "name", "version", "sub_dir")
    finally:
        os.environ.pop("test_env_dir", None)
@pytest.mark.parametrize("step_idx", [10, None])
def test_tensorboard_log_metrics(tmpdir, step_idx):
    """Scalar metrics of each supported numeric type log cleanly, with or without a step."""
    metrics = {
        "float": 0.3,
        "int": 1,
        "FloatTensor": torch.tensor(0.1),
        "IntTensor": torch.tensor(1),
    }
    TensorBoardLogger(tmpdir).log_metrics(metrics, step_idx)
def test_tensorboard_log_hyperparams(tmpdir):
    """A heterogeneous hyperparameter dict (nested dict, list, Namespace, class) is accepted."""
    hparams = {
        "float": 0.3,
        "int": 1,
        "string": "abc",
        "bool": True,
        "dict": {"a": {"b": "c"}},
        "list": [1, 2, 3],
        "namespace": Namespace(foo=Namespace(bar="buzz")),
        "layer": torch.nn.BatchNorm1d,
    }
    logger = TensorBoardLogger(tmpdir)
    logger.log_hyperparams(hparams)
def test_tensorboard_log_hparams_and_metrics(tmpdir):
    """Hyperparameters can be logged together with an initial metrics dict."""
    hparams = {
        "float": 0.3,
        "int": 1,
        "string": "abc",
        "bool": True,
        "dict": {"a": {"b": "c"}},
        "list": [1, 2, 3],
        "namespace": Namespace(foo=Namespace(bar="buzz")),
        "layer": torch.nn.BatchNorm1d,
    }
    metrics = {"abc": torch.tensor([0.54])}
    logger = TensorBoardLogger(tmpdir, default_hp_metric=False)
    logger.log_hyperparams(hparams, metrics)
def test_tensorboard_log_omegaconf_hparams_and_metrics(tmpdir):
    """OmegaConf containers are accepted as hyperparameters alongside metrics."""
    logger = TensorBoardLogger(tmpdir, default_hp_metric=False)
    # Namespace and class entries are deliberately absent: OmegaConf only
    # supports primitive container types.
    hparams = OmegaConf.create({
        "float": 0.3,
        "int": 1,
        "string": "abc",
        "bool": True,
        "dict": {"a": {"b": "c"}},
        "list": [1, 2, 3],
    })
    metrics = {"abc": torch.tensor([0.54])}
    logger.log_hyperparams(hparams, metrics)
@pytest.mark.parametrize("example_input_array", [None, torch.rand(2, 32)])
def test_tensorboard_log_graph(tmpdir, example_input_array):
    """Test that log_graph works both with ``model.example_input_array`` and
    with an input array passed externally.
    """
    model = BoringModel()
    if example_input_array is not None:
        # When an array is passed explicitly below, clear the model attribute so
        # the external array is the only possible source of the graph input.
        model.example_input_array = None

    logger = TensorBoardLogger(tmpdir, log_graph=True)
    logger.log_graph(model, example_input_array)
def test_tensorboard_log_graph_warning_no_example_input_array(tmpdir):
    """log_graph warns when neither the model attribute nor an input array is available."""
    model = BoringModel()
    model.example_input_array = None
    logger = TensorBoardLogger(tmpdir, log_graph=True)
    expected_message = (
        'Could not log computational graph since the `model.example_input_array`'
        ' attribute is not set or `input_array` was not given'
    )
    with pytest.warns(UserWarning, match=expected_message):
        logger.log_graph(model)
@mock.patch('pytorch_lightning.loggers.TensorBoardLogger.log_metrics')
def test_tensorboard_with_accummulated_gradients(mock_log_metrics, tmpdir):
    """Tests to ensure that tensorboard logs properly when accumulate_grad_batches > 1."""

    class TestModel(BoringModel):
        def __init__(self):
            super().__init__()
            # global steps at which this model observed a logging opportunity
            self.indexes = []

        def training_step(self, *args):
            self.log('foo', 1, on_step=True, on_epoch=True)
            # Record the global step only when an optimizer step actually
            # happens AND the connector decides this step should be logged.
            if not self.trainer.train_loop.should_accumulate():
                if self.trainer.logger_connector.should_update_logs:
                    self.indexes.append(self.trainer.global_step)
            return super().training_step(*args)

    model = TestModel()
    model.training_epoch_end = None
    logger_0 = TensorBoardLogger(tmpdir, default_hp_metric=False)
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=12,
        limit_val_batches=0,
        max_epochs=3,
        accumulate_grad_batches=2,
        logger=[logger_0],
        log_every_n_steps=3,
    )
    trainer.fit(model)

    # mock_calls entries are (name, args, kwargs); index 2 is the kwargs dict.
    calls = [m[2] for m in mock_log_metrics.mock_calls]
    count_epochs = [c["step"] for c in calls if "foo_epoch" in c["metrics"]]
    # 12 batches with accumulation 2 -> 6 optimizer steps per epoch, so the
    # epoch-level metric lands on global steps 5, 11 and 17.
    assert count_epochs == [5, 11, 17]

    count_steps = [c["step"] for c in calls if "foo_step" in c["metrics"]]
    # step-level logging must line up exactly with what the model recorded
    assert count_steps == model.indexes
@mock.patch('pytorch_lightning.loggers.tensorboard.SummaryWriter')
def test_tensorboard_finalize(summary_writer, tmpdir):
    """finalize() must both flush pending events and close the SummaryWriter."""
    logger = TensorBoardLogger(save_dir=tmpdir)
    logger.finalize("any")
    writer = summary_writer()
    writer.flush.assert_called()
    writer.close.assert_called()
def test_tensorboard_save_hparams_to_yaml_once(tmpdir):
    """hparams.yaml is written into the logger's log dir only, not the root dir."""
    model = BoringModel()
    logger = TensorBoardLogger(save_dir=tmpdir, default_hp_metric=False)
    trainer = Trainer(max_steps=1, default_root_dir=tmpdir, logger=logger)
    assert trainer.log_dir == trainer.logger.log_dir
    trainer.fit(model)

    yaml_in_log_dir = os.path.join(trainer.log_dir, "hparams.yaml")
    yaml_in_root = os.path.join(tmpdir, "hparams.yaml")
    assert os.path.isfile(yaml_in_log_dir)
    assert not os.path.isfile(yaml_in_root)
@mock.patch('pytorch_lightning.loggers.tensorboard.log')
def test_tensorboard_with_symlink(log, tmpdir):
    """
    Tests a specific failure case when tensorboard logger is used with empty name, symbolic link ``save_dir``, and
    relative paths.

    Fix: the original ``os.chdir(tmpdir)`` was never undone, changing the
    working directory for every test that ran afterwards; the cwd is now
    restored in a ``finally`` block.
    """
    previous_cwd = os.getcwd()
    os.chdir(tmpdir)  # need to use relative paths
    try:
        source = os.path.join('.', 'lightning_logs')
        dest = os.path.join('.', 'sym_lightning_logs')
        os.makedirs(source, exist_ok=True)
        os.symlink(source, dest)

        logger = TensorBoardLogger(save_dir=dest, name='')
        _ = logger.version

        # version detection through the symlink must not emit any warning
        log.warning.assert_not_called()
    finally:
        os.chdir(previous_cwd)
| 33.233236 | 114 | 0.674708 |
import os
from argparse import Namespace
from unittest import mock
import pytest
import torch
import yaml
from omegaconf import OmegaConf
from packaging.version import Version
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import TensorBoardLogger
from tests.helpers import BoringModel
from tests.helpers.runif import RunIf
@RunIf(min_torch="1.5.0")
def test_tensorboard_hparams_reload(tmpdir):
class CustomModel(BoringModel):
def __init__(self, b1=0.5, b2=0.999):
super().__init__()
self.save_hyperparameters()
trainer = Trainer(max_steps=1, default_root_dir=tmpdir)
model = CustomModel()
assert trainer.log_dir == trainer.logger.log_dir
trainer.fit(model)
assert trainer.log_dir == trainer.logger.log_dir
folder_path = trainer.log_dir
with open(os.path.join(folder_path, "hparams.yaml")) as file:
yaml_params = yaml.safe_load(file)
assert yaml_params["b1"] == 0.5
assert yaml_params["b2"] == 0.999
assert len(yaml_params.keys()) == 2
assert len(os.listdir(os.path.join(folder_path, "checkpoints"))) == 1
event_acc = EventAccumulator(folder_path)
event_acc.Reload()
data_pt_1_5 = b'\x12\x1b"\x04\n\x02b1"\x04\n\x02b2*\r\n\x0b\x12\thp_metric'
data_pt_1_6 = b'\x12\x1f"\x06\n\x02b1 \x03"\x06\n\x02b2 \x03*\r\n\x0b\x12\thp_metric'
hparams_data = data_pt_1_6 if Version(torch.__version__) >= Version("1.6.0") else data_pt_1_5
assert event_acc.summary_metadata['_hparams_/experiment'].plugin_data.plugin_name == 'hparams'
assert event_acc.summary_metadata['_hparams_/experiment'].plugin_data.content == hparams_data
def test_tensorboard_automatic_versioning(tmpdir):
root_dir = tmpdir / "tb_versioning"
root_dir.mkdir()
(root_dir / "version_0").mkdir()
(root_dir / "version_1").mkdir()
logger = TensorBoardLogger(save_dir=tmpdir, name="tb_versioning")
assert logger.version == 2
def test_tensorboard_manual_versioning(tmpdir):
root_dir = tmpdir / "tb_versioning"
root_dir.mkdir()
(root_dir / "version_0").mkdir()
(root_dir / "version_1").mkdir()
(root_dir / "version_2").mkdir()
logger = TensorBoardLogger(save_dir=tmpdir, name="tb_versioning", version=1)
assert logger.version == 1
def test_tensorboard_named_version(tmpdir):
name = "tb_versioning"
(tmpdir / name).mkdir()
expected_version = "2020-02-05-162402"
logger = TensorBoardLogger(save_dir=tmpdir, name=name, version=expected_version)
logger.log_hyperparams({"a": 1, "b": 2, 123: 3, 3.5: 4, 5j: 5})
assert logger.version == expected_version
assert os.listdir(tmpdir / name) == [expected_version]
assert os.listdir(tmpdir / name / expected_version)
@pytest.mark.parametrize("name", ["", None])
def test_tensorboard_no_name(tmpdir, name):
logger = TensorBoardLogger(save_dir=tmpdir, name=name)
logger.log_hyperparams({"a": 1, "b": 2, 123: 3, 3.5: 4, 5j: 5})
assert logger.root_dir == tmpdir
assert os.listdir(tmpdir / "version_0")
def test_tensorboard_log_sub_dir(tmpdir):
class TestLogger(TensorBoardLogger):
@property
def version(self):
return "version"
@property
def name(self):
return "name"
trainer_args = dict(
default_root_dir=tmpdir,
max_steps=1,
)
save_dir = tmpdir / "logs"
logger = TestLogger(save_dir)
trainer = Trainer(**trainer_args, logger=logger)
assert trainer.logger.log_dir == os.path.join(save_dir, "name", "version")
logger = TestLogger(save_dir, sub_dir="sub_dir")
trainer = Trainer(**trainer_args, logger=logger)
assert trainer.logger.log_dir == os.path.join(save_dir, "name", "version", "sub_dir")
save_dir = "~/tmp"
explicit_save_dir = os.path.expanduser(save_dir)
logger = TestLogger(save_dir, sub_dir="sub_dir")
trainer = Trainer(**trainer_args, logger=logger)
assert trainer.logger.log_dir == os.path.join(explicit_save_dir, "name", "version", "sub_dir")
test_env_dir = "some_directory"
os.environ["test_env_dir"] = test_env_dir
save_dir = "$test_env_dir/tmp"
explicit_save_dir = f"{test_env_dir}/tmp"
logger = TestLogger(save_dir, sub_dir="sub_dir")
trainer = Trainer(**trainer_args, logger=logger)
assert trainer.logger.log_dir == os.path.join(explicit_save_dir, "name", "version", "sub_dir")
@pytest.mark.parametrize("step_idx", [10, None])
def test_tensorboard_log_metrics(tmpdir, step_idx):
logger = TensorBoardLogger(tmpdir)
metrics = {
"float": 0.3,
"int": 1,
"FloatTensor": torch.tensor(0.1),
"IntTensor": torch.tensor(1),
}
logger.log_metrics(metrics, step_idx)
def test_tensorboard_log_hyperparams(tmpdir):
logger = TensorBoardLogger(tmpdir)
hparams = {
"float": 0.3,
"int": 1,
"string": "abc",
"bool": True,
"dict": {
"a": {
"b": "c"
}
},
"list": [1, 2, 3],
"namespace": Namespace(foo=Namespace(bar="buzz")),
"layer": torch.nn.BatchNorm1d,
}
logger.log_hyperparams(hparams)
def test_tensorboard_log_hparams_and_metrics(tmpdir):
logger = TensorBoardLogger(tmpdir, default_hp_metric=False)
hparams = {
"float": 0.3,
"int": 1,
"string": "abc",
"bool": True,
"dict": {
"a": {
"b": "c"
}
},
"list": [1, 2, 3],
"namespace": Namespace(foo=Namespace(bar="buzz")),
"layer": torch.nn.BatchNorm1d,
}
metrics = {"abc": torch.tensor([0.54])}
logger.log_hyperparams(hparams, metrics)
def test_tensorboard_log_omegaconf_hparams_and_metrics(tmpdir):
logger = TensorBoardLogger(tmpdir, default_hp_metric=False)
hparams = {
"float": 0.3,
"int": 1,
"string": "abc",
"bool": True,
"dict": {
"a": {
"b": "c"
}
},
"list": [1, 2, 3],
}
hparams = OmegaConf.create(hparams)
metrics = {"abc": torch.tensor([0.54])}
logger.log_hyperparams(hparams, metrics)
@pytest.mark.parametrize("example_input_array", [None, torch.rand(2, 32)])
def test_tensorboard_log_graph(tmpdir, example_input_array):
model = BoringModel()
if example_input_array is not None:
model.example_input_array = None
logger = TensorBoardLogger(tmpdir, log_graph=True)
logger.log_graph(model, example_input_array)
def test_tensorboard_log_graph_warning_no_example_input_array(tmpdir):
model = BoringModel()
model.example_input_array = None
logger = TensorBoardLogger(tmpdir, log_graph=True)
with pytest.warns(
UserWarning,
match='Could not log computational graph since the `model.example_input_array`'
' attribute is not set or `input_array` was not given'
):
logger.log_graph(model)
@mock.patch('pytorch_lightning.loggers.TensorBoardLogger.log_metrics')
def test_tensorboard_with_accummulated_gradients(mock_log_metrics, tmpdir):
class TestModel(BoringModel):
def __init__(self):
super().__init__()
self.indexes = []
def training_step(self, *args):
self.log('foo', 1, on_step=True, on_epoch=True)
if not self.trainer.train_loop.should_accumulate():
if self.trainer.logger_connector.should_update_logs:
self.indexes.append(self.trainer.global_step)
return super().training_step(*args)
model = TestModel()
model.training_epoch_end = None
logger_0 = TensorBoardLogger(tmpdir, default_hp_metric=False)
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=12,
limit_val_batches=0,
max_epochs=3,
accumulate_grad_batches=2,
logger=[logger_0],
log_every_n_steps=3,
)
trainer.fit(model)
calls = [m[2] for m in mock_log_metrics.mock_calls]
count_epochs = [c["step"] for c in calls if "foo_epoch" in c["metrics"]]
assert count_epochs == [5, 11, 17]
count_steps = [c["step"] for c in calls if "foo_step" in c["metrics"]]
assert count_steps == model.indexes
@mock.patch('pytorch_lightning.loggers.tensorboard.SummaryWriter')
def test_tensorboard_finalize(summary_writer, tmpdir):
logger = TensorBoardLogger(save_dir=tmpdir)
logger.finalize("any")
summary_writer().flush.assert_called()
summary_writer().close.assert_called()
def test_tensorboard_save_hparams_to_yaml_once(tmpdir):
model = BoringModel()
logger = TensorBoardLogger(save_dir=tmpdir, default_hp_metric=False)
trainer = Trainer(max_steps=1, default_root_dir=tmpdir, logger=logger)
assert trainer.log_dir == trainer.logger.log_dir
trainer.fit(model)
hparams_file = "hparams.yaml"
assert os.path.isfile(os.path.join(trainer.log_dir, hparams_file))
assert not os.path.isfile(os.path.join(tmpdir, hparams_file))
@mock.patch('pytorch_lightning.loggers.tensorboard.log')
def test_tensorboard_with_symlink(log, tmpdir):
os.chdir(tmpdir)
source = os.path.join('.', 'lightning_logs')
dest = os.path.join('.', 'sym_lightning_logs')
os.makedirs(source, exist_ok=True)
os.symlink(source, dest)
logger = TensorBoardLogger(save_dir=dest, name='')
_ = logger.version
log.warning.assert_not_called()
| true | true |
f7fe1d2379e184cf78cde96798fc34d8b40fe50e | 2,839 | py | Python | lib/stagers/osx/macho.py | Gui-Luz/Empire | 6f5eeff5f46dd085e1317cb09b39853a2fce5d13 | [
"BSD-3-Clause"
] | 5,720 | 2017-02-02T13:59:40.000Z | 2022-03-31T09:50:10.000Z | lib/stagers/osx/macho.py | VookiBoo/Empire | 5aae31e7de591282773d2c8498af04ee4e8778f5 | [
"BSD-3-Clause"
] | 866 | 2017-02-02T10:56:31.000Z | 2020-01-17T07:47:05.000Z | lib/stagers/osx/macho.py | VookiBoo/Empire | 5aae31e7de591282773d2c8498af04ee4e8778f5 | [
"BSD-3-Clause"
] | 2,181 | 2017-02-04T10:28:41.000Z | 2022-03-31T04:36:56.000Z | from lib.common import helpers
class Stager:
    """Empire stager module that generates a Mach-O executable for macOS."""

    def __init__(self, mainMenu, params=None):
        """Store module metadata/options and apply any (name, value) overrides.

        :param mainMenu: Empire main-menu object, used for listener/stager access
        :param params: optional iterable of (option_name, value) pairs; fixed
            from the original mutable default ``params=[]``
        """
        self.info = {
            'Name': 'macho',

            'Author': ['@xorrior'],

            'Description': ('Generates a macho executable.'),

            'Comments': [
                ''
            ]
        }

        # any options needed by the stager, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Listener' : {
                'Description' : 'Listener to generate stager for.',
                'Required' : True,
                'Value' : ''
            },
            'Language' : {
                'Description' : 'Language of the stager to generate.',
                'Required' : True,
                'Value' : 'python'
            },
            'OutFile' : {
                'Description' : 'File to write macho to.',
                'Required' : True,
                'Value' : ''
            },
            'SafeChecks' : {
                'Description' : 'Switch. Checks for LittleSnitch or a SandBox, exit the staging process if true. Defaults to True.',
                'Required' : True,
                'Value' : 'True'
            },
            'UserAgent' : {
                'Description' : 'User-agent string to use for the staging request (default, none, or other).',
                'Required' : False,
                'Value' : 'default'
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        # like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # apply overrides; unknown option names are silently ignored
        for option, value in (params or []):
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self):
        """Build and return the Mach-O payload bytes, or '' if the launcher
        could not be generated.

        'OutFile' is consumed by the surrounding menu layer, not here.
        """
        # extract all of our options
        language = self.options['Language']['Value']
        listenerName = self.options['Listener']['Value']
        userAgent = self.options['UserAgent']['Value']
        safeChecks = self.options['SafeChecks']['Value']

        # generate the launcher code with base64 encoding turned off
        launcher = self.mainMenu.stagers.generate_launcher(listenerName, language=language, encode=False, userAgent=userAgent, safeChecks=safeChecks)

        if launcher == "":
            # print() as a function so the module is valid Python 2 AND 3
            # (the original used the Python-2-only print statement)
            print(helpers.color("[!] Error in launcher command generation."))
            return ""
        else:
            macho = self.mainMenu.stagers.generate_macho(launcher)
            return macho
| 34.204819 | 151 | 0.495245 | from lib.common import helpers
class Stager:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'macho',
'Author': ['@xorrior'],
'Description': ('Generates a macho executable.'),
'Comments': [
''
]
}
self.options = {
'Listener' : {
'Description' : 'Listener to generate stager for.',
'Required' : True,
'Value' : ''
},
'Language' : {
'Description' : 'Language of the stager to generate.',
'Required' : True,
'Value' : 'python'
},
'OutFile' : {
'Description' : 'File to write macho to.',
'Required' : True,
'Value' : ''
},
'SafeChecks' : {
'Description' : 'Switch. Checks for LittleSnitch or a SandBox, exit the staging process if true. Defaults to True.',
'Required' : True,
'Value' : 'True'
},
'UserAgent' : {
'Description' : 'User-agent string to use for the staging request (default, none, or other).',
'Required' : False,
'Value' : 'default'
}
}
self.mainMenu = mainMenu
for param in params:
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
language = self.options['Language']['Value']
listenerName = self.options['Listener']['Value']
savePath = self.options['OutFile']['Value']
userAgent = self.options['UserAgent']['Value']
safeChecks = self.options['SafeChecks']['Value']
encode = False
launcher = self.mainMenu.stagers.generate_launcher(listenerName, language=language, encode=encode, userAgent=userAgent, safeChecks=safeChecks)
if launcher == "":
print helpers.color("[!] Error in launcher command generation.")
return ""
else:
macho = self.mainMenu.stagers.generate_macho(launcher)
return macho
| false | true |
f7fe1d9d3eaf3358e979f16c4e47e42c1529c075 | 499 | py | Python | src/xraysink/util.py | garyd203/xray-asyncio | 6fe8a4de74372a5a914f52fcbfb9b9a390ed290a | [
"Apache-2.0"
] | null | null | null | src/xraysink/util.py | garyd203/xray-asyncio | 6fe8a4de74372a5a914f52fcbfb9b9a390ed290a | [
"Apache-2.0"
] | null | null | null | src/xraysink/util.py | garyd203/xray-asyncio | 6fe8a4de74372a5a914f52fcbfb9b9a390ed290a | [
"Apache-2.0"
] | null | null | null | """Miscellaneous functions for working with the X-Ray SDK."""
from aws_xray_sdk.core import xray_recorder
# noinspection PyProtectedMember
def has_current_trace() -> bool:
    """Report whether the recorder currently holds any trace entity.

    Equivalent to calling ``xray_recorder.get_trace_entity()`` without the
    error handling that call requires.
    """
    # Mirrors the check in aws_xray_sdk.core.context.Context.get_trace_entity()
    local_storage = xray_recorder.context._local
    entities = getattr(local_storage, "entities", None)
    return bool(entities)
| 33.266667 | 94 | 0.761523 |
from aws_xray_sdk.core import xray_recorder
def has_current_trace() -> bool:
return bool(getattr(xray_recorder.context._local, "entities", None))
| true | true |
f7fe1dcec75f9f7dc8cc780592c815cd8bef9bd4 | 1,943 | py | Python | superflore/parser.py | zelenkovsky/superflore | 78e62c3b227da6d75bf361e70200ee72cb6de8b0 | [
"Apache-2.0"
] | null | null | null | superflore/parser.py | zelenkovsky/superflore | 78e62c3b227da6d75bf361e70200ee72cb6de8b0 | [
"Apache-2.0"
] | null | null | null | superflore/parser.py | zelenkovsky/superflore | 78e62c3b227da6d75bf361e70200ee72cb6de8b0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
def get_parser(tool_tip, is_generator=True):
    """Build and return the shared superflore command-line parser.

    :param tool_tip: description string shown in ``--help``
    :param is_generator: when True, include the generator-only flags
        ``--ros-distro`` and ``--all``
    """
    parser = argparse.ArgumentParser(tool_tip)
    add_arg = parser.add_argument
    if is_generator:
        add_arg(
            '--ros-distro',
            help='regenerate packages for the specified distro',
            type=str,
        )
        add_arg(
            '--all',
            help='regenerate all packages in all distros',
            action="store_true",
        )
    add_arg(
        '--dry-run',
        help='run without filing a PR to remote',
        action="store_true",
    )
    add_arg(
        '--pr-only',
        help='ONLY file a PR to remote',
        action='store_true',
    )
    add_arg(
        '--output-repository-path',
        help='location of the Git repo',
        type=str,
    )
    add_arg(
        '--only',
        nargs='+',
        help='generate only the specified packages',
    )
    add_arg(
        '--pr-comment',
        help='comment to add to the PR',
        type=str,
    )
    add_arg(
        '--upstream-repo',
        help='location of the upstream repository',
        type=str,
    )
    return parser
| 30.84127 | 74 | 0.587751 |
import argparse
def get_parser(tool_tip, is_generator=True):
parser = argparse.ArgumentParser(tool_tip)
if is_generator:
parser.add_argument(
'--ros-distro',
help='regenerate packages for the specified distro',
type=str
)
parser.add_argument(
'--all',
help='regenerate all packages in all distros',
action="store_true"
)
parser.add_argument(
'--dry-run',
help='run without filing a PR to remote',
action="store_true"
)
parser.add_argument(
'--pr-only',
help='ONLY file a PR to remote',
action='store_true'
)
parser.add_argument(
'--output-repository-path',
help='location of the Git repo',
type=str
)
parser.add_argument(
'--only',
nargs='+',
help='generate only the specified packages'
)
parser.add_argument(
'--pr-comment',
help='comment to add to the PR',
type=str
)
parser.add_argument(
'--upstream-repo',
help='location of the upstream repository',
type=str
)
return parser
| true | true |
f7fe1fc07e0057997ed4e3cc163e88c1a5654472 | 287 | py | Python | automatic-hardware-analysis/src/com/automatic/hardware/analysis/__init__.py | jvruas/simple-get-data-about-computer-hardware | c1a45b0548391a964f7ab5ed7bb1c60686880640 | [
"MIT"
] | null | null | null | automatic-hardware-analysis/src/com/automatic/hardware/analysis/__init__.py | jvruas/simple-get-data-about-computer-hardware | c1a45b0548391a964f7ab5ed7bb1c60686880640 | [
"MIT"
] | null | null | null | automatic-hardware-analysis/src/com/automatic/hardware/analysis/__init__.py | jvruas/simple-get-data-about-computer-hardware | c1a45b0548391a964f7ab5ed7bb1c60686880640 | [
"MIT"
] | null | null | null | from SendComputerAnalysis import SendComputerAnalysis
class MainAutomaticHardwareAnalysis:
    """Entry-point wrapper: constructing it triggers the hardware analysis run."""

    def __init__(self):
        # Instantiated purely for its side effects (running the analysis);
        # the unused local binding from the original has been removed.
        SendComputerAnalysis()
# Mark this file as the project's launcher (translated from Portuguese:
# "Definindo o arquivo como o Launcher do Projeto").
if __name__ == "__main__":
    MainAutomaticHardwareAnalysis()
| 26.090909 | 56 | 0.794425 | from SendComputerAnalysis import SendComputerAnalysis
class MainAutomaticHardwareAnalysis:
def __init__(self):
currentComputerAnalysis = SendComputerAnalysis()
if __name__ == "__main__":
MainAutomaticHardwareAnalysis()
| true | true |
f7fe1fd91aad6330980fa240cde4e9b972ba5956 | 2,751 | py | Python | model/models.py | Jokerakos/ekpa-papadimitriou | fe008b1fc963de4acddd5391a3bb4962bb706c97 | [
"MIT"
] | null | null | null | model/models.py | Jokerakos/ekpa-papadimitriou | fe008b1fc963de4acddd5391a3bb4962bb706c97 | [
"MIT"
] | null | null | null | model/models.py | Jokerakos/ekpa-papadimitriou | fe008b1fc963de4acddd5391a3bb4962bb706c97 | [
"MIT"
] | 1 | 2021-01-20T15:47:41.000Z | 2021-01-20T15:47:41.000Z | import psycopg2
import pandas as pd
def connect_to_db():
    """Open a PostgreSQL connection in autocommit mode and return (connection, cursor).

    NOTE(review): host/database/user/password are redacted placeholders in this
    source; real credentials must be supplied before this can run.
    """
    db_connection = psycopg2.connect(
        host="***.***.***.**",
        database="********",
        user="*********",
        password="********")
    db_connection.set_session(autocommit=True)
    cursor = db_connection.cursor()
    # Sanity check: query and print the server version so a bad connection
    # fails loudly right here at import time.
    cursor.execute('SELECT version()')
    db_version = cursor.fetchone()
    print(db_version)
    return db_connection,cursor
# Module-level connection/cursor: opened at import time and shared by every
# function below (conn = connection, db = cursor).
conn,db=connect_to_db()
def create_table():
    """Create the ``newscrawler`` table.

    :return: True on success, False if the DDL failed (error is printed).

    Bug fix: the original called ``db.close()`` after creating the table,
    closing the *shared module-level cursor* and breaking every later query
    (insert_to_db / record_exists). The cursor is now left open.
    """
    try:
        table_creation="""
        CREATE TABLE newscrawler (
            id serial PRIMARY KEY,
            title VARCHAR ( 500 ) ,
            text VARCHAR ( 2000 ) ,
            time TIMESTAMP ,
            newsource VARCHAR ( 500 ) ,
            image VARCHAR ( 500 ) ,
            country VARCHAR ( 500 ) ,
            countrycode VARCHAR ( 500 ) ,
            newslet VARCHAR ( 500 ) ,
            created_at TIMESTAMP NOT NULL DEFAULT NOW(),
            updated_at TIMESTAMP NOT NULL DEFAULT NOW()
        );
        """
        db.execute(table_creation)
        return True
    except Exception as e :
        print("error:",e)
        return False
def insert_to_db(new_source,data=None):
    """Insert articles that are not yet in ``newscrawler``.

    :param new_source: value stored in the ``newsource`` column for every row
    :param data: list of article dicts (keys: title, text, time, image,
        country, countrycode, newslet); missing keys become NULL
    :return: True on success, False on any error (error is printed)

    Bug fix: titles are no longer pre-escaped with ``.replace("'", "''")``.
    The INSERT is parameterized (%s placeholders), so the manual escaping was
    double-escaping — titles containing apostrophes were stored corrupted
    with doubled quotes.
    """
    if data is None:
        data=[]
    try:
        record_to_insert=[]
        if len(data)>0:
            for d in data:
                # skip rows whose title is already present
                checkrecord=record_exists(d['title'])
                print("checkrecord:",checkrecord)
                if not checkrecord:
                    title=str(d['title']) if 'title' in d else None
                    text=d['text'] if 'text' in d else None
                    time=d['time'] if 'time' in d else None
                    newsource=new_source
                    image=d['image'] if 'image' in d else None
                    country=d['country'] if 'country' in d else None
                    countrycode=d['countrycode'] if 'countrycode' in d else None
                    newslet=d['newslet'] if 'newslet' in d else None
                    db_data=(title,text,time,newsource,image,country,countrycode,newslet)
                    record_to_insert.append(db_data)
        db_insert_query = """ INSERT INTO newscrawler (title, text, time,newsource,image,country,countrycode,newslet) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)"""
        for record in record_to_insert :
            db.execute(db_insert_query, record)
        conn.commit()
        return True
    except Exception as e :
        print("error:",e)
        return False
def record_exists(title):
    """Return True if a ``newscrawler`` row with exactly this title exists.

    Security fix: the title is now passed as a bound parameter instead of
    being interpolated (with manual quote-doubling) into the SQL string,
    which was an SQL-injection vector for attacker-controlled titles.
    """
    query="SELECT id FROM newscrawler WHERE title = %s"
    db.execute(query, (str(title),))
    return db.fetchone() is not None
if __name__ == '__main__':
    # print(create_table())  # one-time schema setup; uncomment on first run
    # Load scraped articles from CSV and bulk-insert them under source 'news247'.
    df = pd.read_csv("news.csv")
    data=df.to_dict(orient='records')
    print(insert_to_db('news247',data))
| 31.261364 | 149 | 0.580516 | import psycopg2
import pandas as pd
def connect_to_db():
db_connection = psycopg2.connect(
host="***.***.***.**",
database="********",
user="*********",
password="********")
db_connection.set_session(autocommit=True)
cursor = db_connection.cursor()
cursor.execute('SELECT version()')
db_version = cursor.fetchone()
print(db_version)
return db_connection,cursor
conn,db=connect_to_db()
def create_table():
    """Create the ``newscrawler`` table (id, article fields, timestamps).

    :return: True on success, False if the DDL failed
    """
    try:
        table_creation="""
        CREATE TABLE newscrawler (
        id serial PRIMARY KEY,
        title VARCHAR ( 500 ) ,
        text VARCHAR ( 2000 ) ,
        time TIMESTAMP ,
        newsource VARCHAR ( 500 ) ,
        image VARCHAR ( 500 ) ,
        country VARCHAR ( 500 ) ,
        countrycode VARCHAR ( 500 ) ,
        newslet VARCHAR ( 500 ) ,
        created_at TIMESTAMP NOT NULL DEFAULT NOW(),
        updated_at TIMESTAMP NOT NULL DEFAULT NOW()
        );
        """
        db.execute(table_creation)
        # Bug fix: the old code called db.close() here, which closed the
        # shared module-level cursor and broke every later query issued
        # through it (insert_to_db / record_exists).  Keep the cursor open.
        return True
    except Exception as e:
        print("error:",e)
        return False
def insert_to_db(new_source,data=None):
if data is None:
data=[]
try:
record_to_insert=[]
if len(data)>0:
for d in data:
checkrecord=record_exists(d['title'])
print("checkrecord:",checkrecord)
if not checkrecord:
title=str(d['title']).replace("'","''") if 'title' in d else None
text=d['text'] if 'text' in d else None
time=d['time'] if 'time' in d else None
newsource=new_source
image=d['image'] if 'image' in d else None
country=d['country'] if 'country' in d else None
countrycode=d['countrycode'] if 'countrycode' in d else None
newslet=d['newslet'] if 'newslet' in d else None
db_data=(title,text,time,newsource,image,country,countrycode,newslet)
record_to_insert.append(db_data)
db_insert_query = """ INSERT INTO newscrawler (title, text, time,newsource,image,country,countrycode,newslet) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)"""
for record in record_to_insert :
db.execute(db_insert_query, record)
conn.commit()
return True
except Exception as e :
print("error:",e)
return False
def record_exists(title):
title=str(title).replace("'","''")
query="""SELECT id FROM newscrawler WHERE title = '{title}'""".format(title=title)
db.execute(query)
return db.fetchone() is not None
if __name__ == '__main__':
df = pd.read_csv("news.csv")
data=df.to_dict(orient='records')
print(insert_to_db('news247',data))
| true | true |
f7fe1feceeccbb3c7dc6860f0ee5d6a9c10a57df | 7,507 | py | Python | PythonAndroid/youtube-dl/lib/python3.5/youtube_dl/extractor/rutv.py | jianglei12138/python-3.5.1 | 2d248ceba8aa4c14ee43e57ece99cc1a43fd22b7 | [
"PSF-2.0"
] | 5 | 2016-04-25T16:26:07.000Z | 2021-04-28T16:10:29.000Z | PythonAndroid/youtube-dl/lib/python3.5/youtube_dl/extractor/rutv.py | jianglei12138/python-3.5.1 | 2d248ceba8aa4c14ee43e57ece99cc1a43fd22b7 | [
"PSF-2.0"
] | 5 | 2016-04-22T01:33:31.000Z | 2016-08-04T15:33:19.000Z | PythonAndroid/youtube-dl/lib/python3.5/youtube_dl/extractor/rutv.py | jianglei12138/python-3.5.1 | 2d248ceba8aa4c14ee43e57ece99cc1a43fd22b7 | [
"PSF-2.0"
] | 4 | 2016-04-26T15:27:38.000Z | 2018-11-12T21:04:54.000Z | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none
)
class RUTVIE(InfoExtractor):
    """Extractor for videos served through player.rutv.ru / player.vgtrk.com."""

    IE_DESC = 'RUTV.RU'
    # The "path" group distinguishes the three embed flavours (flash2v
    # container, iframe, index/cast_id); "type" narrows the iframe kind.
    _VALID_URL = r'''(?x)
        https?://player\.(?:rutv\.ru|vgtrk\.com)/
        (?P<path>flash2v/container\.swf\?id=
        |iframe/(?P<type>swf|video|live)/id/
        |index/iframe/cast_id/)
        (?P<id>\d+)'''
    _TESTS = [
        {
            'url': 'http://player.rutv.ru/flash2v/container.swf?id=774471&sid=kultura&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972347/video_id/978186/brand_id/31724',
            'info_dict': {
                'id': '774471',
                'ext': 'mp4',
                'title': 'Монологи на все времена',
                'description': 'md5:18d8b5e6a41fb1faa53819471852d5d5',
                'duration': 2906,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'https://player.vgtrk.com/flash2v/container.swf?id=774016&sid=russiatv&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972098/video_id/977760/brand_id/57638',
            'info_dict': {
                'id': '774016',
                'ext': 'mp4',
                'title': 'Чужой в семье Сталина',
                'description': '',
                'duration': 2539,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'http://player.rutv.ru/iframe/swf/id/766888/sid/hitech/?acc_video_id=4000',
            'info_dict': {
                'id': '766888',
                'ext': 'mp4',
                'title': 'Вести.net: интернет-гиганты начали перетягивание программных "одеял"',
                'description': 'md5:65ddd47f9830c4f42ed6475f8730c995',
                'duration': 279,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'http://player.rutv.ru/iframe/video/id/771852/start_zoom/true/showZoomBtn/false/sid/russiatv/?acc_video_id=episode_id/970443/video_id/975648/brand_id/5169',
            'info_dict': {
                'id': '771852',
                'ext': 'mp4',
                'title': 'Прямой эфир. Жертвы загадочной болезни: смерть от старости в 17 лет',
                'description': 'md5:b81c8c55247a4bd996b43ce17395b2d8',
                'duration': 3096,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'http://player.rutv.ru/iframe/live/id/51499/showZoomBtn/false/isPlay/true/sid/sochi2014',
            'info_dict': {
                'id': '51499',
                'ext': 'flv',
                'title': 'Сочи-2014. Биатлон. Индивидуальная гонка. Мужчины ',
                'description': 'md5:9e0ed5c9d2fa1efbfdfed90c9a6d179c',
            },
            'skip': 'Translation has finished',
        },
        {
            'url': 'http://player.rutv.ru/iframe/live/id/21/showZoomBtn/false/isPlay/true/',
            'info_dict': {
                'id': '21',
                'ext': 'mp4',
                'title': 're:^Россия 24. Прямой эфир [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
                'is_live': True,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
    ]

    @classmethod
    def _extract_url(cls, webpage):
        """Return the first embedded RUTV player URL found in ``webpage``.

        Looks for an <iframe> embed first, then an og:video <meta> tag;
        returns None implicitly when neither is present.
        """
        mobj = re.search(
            r'<iframe[^>]+?src=(["\'])(?P<url>https?://player\.(?:rutv\.ru|vgtrk\.com)/(?:iframe/(?:swf|video|live)/id|index/iframe/cast_id)/.+?)\1', webpage)
        if mobj:
            return mobj.group('url')
        mobj = re.search(
            r'<meta[^>]+?property=(["\'])og:video\1[^>]+?content=(["\'])(?P<url>https?://player\.(?:rutv\.ru|vgtrk\.com)/flash2v/container\.swf\?id=.+?\2)',
            webpage)
        if mobj:
            return mobj.group('url')

    def _real_extract(self, url):
        """Fetch the player's JSON metadata and build the info dict."""
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        video_path = mobj.group('path')

        # Map the URL shape onto the API's "video" / "live" endpoint names.
        if video_path.startswith('flash2v'):
            video_type = 'video'
        elif video_path.startswith('iframe'):
            video_type = mobj.group('type')
            if video_type == 'swf':
                video_type = 'video'
        elif video_path.startswith('index/iframe/cast_id'):
            video_type = 'live'
        is_live = video_type == 'live'
        json_data = self._download_json(
            'http://player.rutv.ru/iframe/data%s/id/%s' % ('live' if is_live else 'video', video_id),
            video_id, 'Downloading JSON')

        # The API reports errors both at the top level and per media item.
        if json_data['errors']:
            raise ExtractorError('%s said: %s' % (self.IE_NAME, json_data['errors']), expected=True)
        playlist = json_data['data']['playlist']
        medialist = playlist['medialist']
        media = medialist[0]
        if media['errors']:
            raise ExtractorError('%s said: %s' % (self.IE_NAME, media['errors']), expected=True)
        view_count = playlist.get('count_views')
        priority_transport = playlist['priority_transport']
        thumbnail = media['picture']
        width = int_or_none(media['width'])
        height = int_or_none(media['height'])
        description = media['anons']
        title = media['title']
        duration = int_or_none(media.get('duration'))

        # Build one format per (transport, quality) pair; the playlist's
        # preferred transport is ranked highest via `preference`.
        formats = []
        for transport, links in media['sources'].items():
            for quality, url in links.items():
                preference = -1 if priority_transport == transport else -2
                if transport == 'rtmp':
                    # Split the rtmp URL into app/playpath as librtmp expects.
                    mobj = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+))/(?P<playpath>.+)$', url)
                    if not mobj:
                        continue
                    fmt = {
                        'url': mobj.group('url'),
                        'play_path': mobj.group('playpath'),
                        'app': mobj.group('app'),
                        'page_url': 'http://player.rutv.ru',
                        'player_url': 'http://player.rutv.ru/flash2v/osmf.swf?i=22',
                        'rtmp_live': True,
                        'ext': 'flv',
                        'vbr': int(quality),
                        'preference': preference,
                    }
                elif transport == 'm3u8':
                    formats.extend(self._extract_m3u8_formats(
                        url, video_id, 'mp4', preference=preference, m3u8_id='hls'))
                    continue
                else:
                    # Plain progressive download URL.
                    fmt = {
                        'url': url
                    }
                fmt.update({
                    'width': width,
                    'height': height,
                    'format_id': '%s-%s' % (transport, quality),
                })
                formats.append(fmt)
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': self._live_title(title) if is_live else title,
            'description': description,
            'thumbnail': thumbnail,
            'view_count': view_count,
            'duration': duration,
            'formats': formats,
            'is_live': is_live,
        }
| 36.79902 | 190 | 0.480085 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none
)
class RUTVIE(InfoExtractor):
IE_DESC = 'RUTV.RU'
_VALID_URL = r'''(?x)
https?://player\.(?:rutv\.ru|vgtrk\.com)/
(?P<path>flash2v/container\.swf\?id=
|iframe/(?P<type>swf|video|live)/id/
|index/iframe/cast_id/)
(?P<id>\d+)'''
_TESTS = [
{
'url': 'http://player.rutv.ru/flash2v/container.swf?id=774471&sid=kultura&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972347/video_id/978186/brand_id/31724',
'info_dict': {
'id': '774471',
'ext': 'mp4',
'title': 'Монологи на все времена',
'description': 'md5:18d8b5e6a41fb1faa53819471852d5d5',
'duration': 2906,
},
'params': {
'skip_download': True,
},
},
{
'url': 'https://player.vgtrk.com/flash2v/container.swf?id=774016&sid=russiatv&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972098/video_id/977760/brand_id/57638',
'info_dict': {
'id': '774016',
'ext': 'mp4',
'title': 'Чужой в семье Сталина',
'description': '',
'duration': 2539,
},
'params': {
'skip_download': True,
},
},
{
'url': 'http://player.rutv.ru/iframe/swf/id/766888/sid/hitech/?acc_video_id=4000',
'info_dict': {
'id': '766888',
'ext': 'mp4',
'title': 'Вести.net: интернет-гиганты начали перетягивание программных "одеял"',
'description': 'md5:65ddd47f9830c4f42ed6475f8730c995',
'duration': 279,
},
'params': {
'skip_download': True,
},
},
{
'url': 'http://player.rutv.ru/iframe/video/id/771852/start_zoom/true/showZoomBtn/false/sid/russiatv/?acc_video_id=episode_id/970443/video_id/975648/brand_id/5169',
'info_dict': {
'id': '771852',
'ext': 'mp4',
'title': 'Прямой эфир. Жертвы загадочной болезни: смерть от старости в 17 лет',
'description': 'md5:b81c8c55247a4bd996b43ce17395b2d8',
'duration': 3096,
},
'params': {
'skip_download': True,
},
},
{
'url': 'http://player.rutv.ru/iframe/live/id/51499/showZoomBtn/false/isPlay/true/sid/sochi2014',
'info_dict': {
'id': '51499',
'ext': 'flv',
'title': 'Сочи-2014. Биатлон. Индивидуальная гонка. Мужчины ',
'description': 'md5:9e0ed5c9d2fa1efbfdfed90c9a6d179c',
},
'skip': 'Translation has finished',
},
{
'url': 'http://player.rutv.ru/iframe/live/id/21/showZoomBtn/false/isPlay/true/',
'info_dict': {
'id': '21',
'ext': 'mp4',
'title': 're:^Россия 24. Прямой эфир [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'is_live': True,
},
'params': {
'skip_download': True,
},
},
]
@classmethod
def _extract_url(cls, webpage):
mobj = re.search(
r'<iframe[^>]+?src=(["\'])(?P<url>https?://player\.(?:rutv\.ru|vgtrk\.com)/(?:iframe/(?:swf|video|live)/id|index/iframe/cast_id)/.+?)\1', webpage)
if mobj:
return mobj.group('url')
mobj = re.search(
r'<meta[^>]+?property=(["\'])og:video\1[^>]+?content=(["\'])(?P<url>https?://player\.(?:rutv\.ru|vgtrk\.com)/flash2v/container\.swf\?id=.+?\2)',
webpage)
if mobj:
return mobj.group('url')
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
video_path = mobj.group('path')
if video_path.startswith('flash2v'):
video_type = 'video'
elif video_path.startswith('iframe'):
video_type = mobj.group('type')
if video_type == 'swf':
video_type = 'video'
elif video_path.startswith('index/iframe/cast_id'):
video_type = 'live'
is_live = video_type == 'live'
json_data = self._download_json(
'http://player.rutv.ru/iframe/data%s/id/%s' % ('live' if is_live else 'video', video_id),
video_id, 'Downloading JSON')
if json_data['errors']:
raise ExtractorError('%s said: %s' % (self.IE_NAME, json_data['errors']), expected=True)
playlist = json_data['data']['playlist']
medialist = playlist['medialist']
media = medialist[0]
if media['errors']:
raise ExtractorError('%s said: %s' % (self.IE_NAME, media['errors']), expected=True)
view_count = playlist.get('count_views')
priority_transport = playlist['priority_transport']
thumbnail = media['picture']
width = int_or_none(media['width'])
height = int_or_none(media['height'])
description = media['anons']
title = media['title']
duration = int_or_none(media.get('duration'))
formats = []
for transport, links in media['sources'].items():
for quality, url in links.items():
preference = -1 if priority_transport == transport else -2
if transport == 'rtmp':
mobj = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+))/(?P<playpath>.+)$', url)
if not mobj:
continue
fmt = {
'url': mobj.group('url'),
'play_path': mobj.group('playpath'),
'app': mobj.group('app'),
'page_url': 'http://player.rutv.ru',
'player_url': 'http://player.rutv.ru/flash2v/osmf.swf?i=22',
'rtmp_live': True,
'ext': 'flv',
'vbr': int(quality),
'preference': preference,
}
elif transport == 'm3u8':
formats.extend(self._extract_m3u8_formats(
url, video_id, 'mp4', preference=preference, m3u8_id='hls'))
continue
else:
fmt = {
'url': url
}
fmt.update({
'width': width,
'height': height,
'format_id': '%s-%s' % (transport, quality),
})
formats.append(fmt)
self._sort_formats(formats)
return {
'id': video_id,
'title': self._live_title(title) if is_live else title,
'description': description,
'thumbnail': thumbnail,
'view_count': view_count,
'duration': duration,
'formats': formats,
'is_live': is_live,
}
| true | true |
f7fe1ff721e81c240a1ff55702a9dc5dede65fc8 | 1,199 | py | Python | max-cut/code/simulated_annealing.py | minasora/- | 7c496feba6b74d4743e6d9f8e3adbdd4f6dd48d5 | [
"MIT"
] | 1 | 2021-06-01T06:32:09.000Z | 2021-06-01T06:32:09.000Z | max-cut/code/simulated_annealing.py | minasora/- | 7c496feba6b74d4743e6d9f8e3adbdd4f6dd48d5 | [
"MIT"
] | null | null | null | max-cut/code/simulated_annealing.py | minasora/- | 7c496feba6b74d4743e6d9f8e3adbdd4f6dd48d5 | [
"MIT"
] | 3 | 2020-06-07T16:20:45.000Z | 2021-06-01T06:32:11.000Z | import local_search as ls
import record_process as rp
import max_cut_instance as m_instance
import random as rd
import math
T = 10000  # initial temperature
T_min = pow(10,-5)  # stop once the temperature cools below this
Max_iters = 10000  # maximum number of iterations
r = 0.99  # geometric cooling rate (T <- r * T each iteration)
def E_evaluation(delta, T):
    """Metropolis acceptance probability for a candidate move.

    Improving moves (``delta > 0``) are always accepted; worsening moves
    are accepted with probability ``exp(delta / T)``, which shrinks as the
    temperature ``T`` cools.

    :param delta: objective change produced by the candidate move
    :param T: current annealing temperature
    :return: acceptance probability in (0, 1]
    """
    return 1 if delta > 0 else math.exp(delta / T)
def simulated_annealing(solution, instance, T=T, T_min=T_min, Max_iters=Max_iters, r=r):
    """
    Simulated annealing local search (used here on max-cut instances).

    Each iteration proposes flipping one random vertex; the move is
    accepted with the Metropolis probability from E_evaluation, and the
    temperature decays geometrically (T <- r * T).  Every visited solution
    is appended to a log written via rp.record at the end.

    :param solution: current solution (uses .obj and the cached .updates deltas)
    :param instance: problem instance (uses .p, the number of vertices)
    :param T: initial temperature
    :param T_min: stop once the temperature drops below this value
    :param Max_iters: maximum number of iterations
    :param r: geometric cooling rate (0 < r < 1)
    :return: the solution held when the loop terminated
    """
    iter = 0
    record_data = 'T {} T_min {} r {}\n\n'.format(T, T_min, r)
    while iter < Max_iters and T > T_min:
        record_data = record_data + "iteration: {}\n{}\n\n".format(iter, solution)
        # Pick a random vertex; solution.updates[i] caches the objective
        # change of flipping it.
        i = rd.randint(0, instance.p - 1)
        delta = solution.updates[i]
        p = E_evaluation(delta, T)
        p_rd = rd.random()
        print(iter,solution.obj,T,p)
        if p_rd < p:
            solution = ls.flip(solution, i, instance)
        T = r * T
        iter += 1
    rp.record(record_data,"sa")
    return solution
| 21.8 | 88 | 0.582152 | import local_search as ls
import record_process as rp
import max_cut_instance as m_instance
import random as rd
import math
T = 10000
T_min = pow(10,-5)
Max_iters = 10000
r = 0.99
def E_evaluation(delta, T):
if delta > 0:
return 1
else:
return math.exp(delta / T)
def simulated_annealing(solution, instance, T=T, T_min=T_min, Max_iters=Max_iters, r=r):
iter = 0
record_data = 'T {} T_min {} r {}\n\n'.format(T, T_min, r)
while iter < Max_iters and T > T_min:
record_data = record_data + "iteration: {}\n{}\n\n".format(iter, solution)
i = rd.randint(0, instance.p - 1)
delta = solution.updates[i]
p = E_evaluation(delta, T)
p_rd = rd.random()
print(iter,solution.obj,T,p)
if p_rd < p:
solution = ls.flip(solution, i, instance)
T = r * T
iter += 1
rp.record(record_data,"sa")
return solution
| true | true |
f7fe216bdb9175bdf57112572f50a1c58ad54aff | 11,347 | py | Python | bot/constants.py | CasualCoder99/sir-lancebot | 0a6a355419d9382b35ac651117287d907e98af0c | [
"MIT"
] | null | null | null | bot/constants.py | CasualCoder99/sir-lancebot | 0a6a355419d9382b35ac651117287d907e98af0c | [
"MIT"
] | null | null | null | bot/constants.py | CasualCoder99/sir-lancebot | 0a6a355419d9382b35ac651117287d907e98af0c | [
"MIT"
] | null | null | null | import dataclasses
import enum
import logging
from datetime import datetime
from os import environ
from typing import Dict, NamedTuple
# Explicit public API of this module (what star-imports pick up).
# NOTE(review): Source, GIT_SHA and the AoC leaderboard helpers are not
# listed here -- confirm that is intentional.
__all__ = (
    "AdventOfCode",
    "Branding",
    "Cats",
    "Channels",
    "Categories",
    "Client",
    "Colours",
    "Emojis",
    "Icons",
    "Lovefest",
    "Month",
    "Roles",
    "Tokens",
    "Wolfram",
    "Reddit",
    "RedisConfig",
    "RedirectOutput",
    "MODERATION_ROLES",
    "STAFF_ROLES",
    "WHITELISTED_CHANNELS",
    "ERROR_REPLIES",
    "NEGATIVE_REPLIES",
    "POSITIVE_REPLIES",
)

# Module-level logger for this constants module.
log = logging.getLogger(__name__)
@dataclasses.dataclass
class AdventOfCodeLeaderboard:
    """Configuration for a single Advent of Code leaderboard."""

    id: str
    _session: str
    join_code: str

    # Flipped to `True` once we notice the stored session cookie has
    # expired; the shared fallback cookie is then used so the commands
    # keep working while the expiry is being handled.
    use_fallback_session: bool = False

    @property
    def session(self) -> str:
        """Return the board's own `session` cookie, or the fallback one."""
        if not self.use_fallback_session:
            return self._session
        log.info(f"Returning fallback cookie for board `{self.id}`.")
        return AdventOfCode.fallback_session
def _parse_aoc_leaderboard_env() -> Dict[str, AdventOfCodeLeaderboard]:
    """
    Parse the environment variable containing leaderboard information.

    Each leaderboard is given as `id,session,join_code`; multiple boards
    are joined with `::`, e.g.
    `id1,session1,join_code1::id2,session2,join_code2`.
    """
    raw_value = environ.get("AOC_LEADERBOARDS", "")
    if not raw_value:
        return {}

    parsed = {}
    for entry in raw_value.split("::"):
        board_id, session, join_code = entry.split(",")
        parsed[board_id] = AdventOfCodeLeaderboard(board_id, session, join_code)
    return parsed
class AdventOfCode:
    """Advent of Code settings, mostly read from the environment."""

    # Information for the several leaderboards we have
    leaderboards = _parse_aoc_leaderboard_env()
    staff_leaderboard_id = environ.get("AOC_STAFF_LEADERBOARD_ID", "")
    fallback_session = environ.get("AOC_FALLBACK_SESSION", "")

    # Other Advent of Code constants
    ignored_days = environ.get("AOC_IGNORED_DAYS", "").split(",")
    leaderboard_displayed_members = 10
    leaderboard_cache_expiry_seconds = 1800
    year = int(environ.get("AOC_YEAR", datetime.utcnow().year))
    role_id = int(environ.get("AOC_ROLE_ID", 518565788744024082))
class Branding:
    """Settings for cycling the guild branding."""

    cycle_frequency = int(environ.get("CYCLE_FREQUENCY", 3))  # 0: never, 1: every day, 2: every other day, ...
class Cats:
    """A collection of cat emoticons."""

    cats = ["ᓚᘏᗢ", "ᘡᘏᗢ", "🐈", "ᓕᘏᗢ", "ᓇᘏᗢ", "ᓂᘏᗢ", "ᘣᘏᗢ", "ᕦᘏᗢ", "ᕂᘏᗢ"]
class Channels(NamedTuple):
    """Discord channel IDs the bot posts in or watches.

    NOTE(review): the attributes below have no annotations, so they are
    plain class attributes rather than actual NamedTuple fields; the base
    class only mirrors the style of the other containers in this module.
    """

    advent_of_code = int(environ.get("AOC_CHANNEL_ID", 782715290437943306))
    advent_of_code_commands = int(environ.get("AOC_COMMANDS_CHANNEL_ID", 607247579608121354))
    bot = 267659945086812160
    organisation = 551789653284356126
    devlog = int(environ.get("CHANNEL_DEVLOG", 622895325144940554))
    dev_contrib = 635950537262759947
    mod_meta = 775412552795947058
    mod_tools = 775413915391098921
    off_topic_0 = 291284109232308226
    off_topic_1 = 463035241142026251
    off_topic_2 = 463035268514185226
    community_bot_commands = int(environ.get("CHANNEL_COMMUNITY_BOT_COMMANDS", 607247579608121354))
    hacktoberfest_2020 = 760857070781071431
    voice_chat_0 = 412357430186344448
    voice_chat_1 = 799647045886541885
    staff_voice = 541638762007101470
    reddit = int(environ.get("CHANNEL_REDDIT", 458224812528238616))
class Categories(NamedTuple):
    """Discord channel-category IDs used by the bot."""

    help_in_use = 696958401460043776
    development = 411199786025484308
    devprojects = 787641585624940544
    media = 799054581991997460
    staff = 364918151625965579
class Client(NamedTuple):
    """Core bot client configuration, pulled from the environment."""

    name = "Sir Lancebot"
    guild = int(environ.get("BOT_GUILD", 267624335836053506))
    prefix = environ.get("PREFIX", ".")
    token = environ.get("BOT_TOKEN")
    sentry_dsn = environ.get("BOT_SENTRY_DSN")
    debug = environ.get("BOT_DEBUG", "").lower() == "true"
    github_bot_repo = "https://github.com/python-discord/sir-lancebot"
    # Override seasonal locks: 1 (January) to 12 (December)
    month_override = int(environ["MONTH_OVERRIDE"]) if "MONTH_OVERRIDE" in environ else None
class Colours:
    """Colour values (hex ints) for embeds, plus easter-themed RGB tuples."""

    blue = 0x0279FD
    bright_green = 0x01D277
    dark_green = 0x1F8B4C
    orange = 0xE67E22
    pink = 0xCF84E0
    purple = 0xB734EB
    soft_green = 0x68C290
    soft_orange = 0xF9CB54
    soft_red = 0xCD6D6D
    yellow = 0xF9F586
    python_blue = 0x4B8BBE
    python_yellow = 0xFFD43B
    grass_green = 0x66FF00
    gold = 0xE6C200

    # Pastel / spring-like (r, g, b) tuples.
    easter_like_colours = [
        (255, 247, 0),
        (255, 255, 224),
        (0, 255, 127),
        (189, 252, 201),
        (255, 192, 203),
        (255, 160, 122),
        (181, 115, 220),
        (221, 160, 221),
        (200, 162, 200),
        (238, 130, 238),
        (135, 206, 235),
        (0, 204, 204),
        (64, 224, 208),
    ]
class Emojis:
    """Emoji literals (unicode escapes and custom <:name:id> strings) used in bot output."""

    cross_mark = "\u274C"
    star = "\u2B50"
    christmas_tree = "\U0001F384"
    check = "\u2611"
    envelope = "\U0001F4E8"
    trashcan = environ.get("TRASHCAN_EMOJI", "<:trashcan:637136429717389331>")
    ok_hand = ":ok_hand:"
    hand_raised = "\U0001F64B"

    dice_1 = "<:dice_1:755891608859443290>"
    dice_2 = "<:dice_2:755891608741740635>"
    dice_3 = "<:dice_3:755891608251138158>"
    dice_4 = "<:dice_4:755891607882039327>"
    dice_5 = "<:dice_5:755891608091885627>"
    dice_6 = "<:dice_6:755891607680843838>"

    # GitHub issue / pull-request state icons.
    issue = "<:IssueOpen:629695470327037963>"
    issue_closed = "<:IssueClosed:629695470570307614>"
    pull_request = "<:PROpen:629695470175780875>"
    pull_request_closed = "<:PRClosed:629695470519713818>"
    pull_request_draft = "<:PRDraft:829755345425399848>"
    merge = "<:PRMerged:629695470570176522>"

    # Keycap digit emojis, keyed by the digit they display.
    number_emojis = {
        1: "\u0031\ufe0f\u20e3",
        2: "\u0032\ufe0f\u20e3",
        3: "\u0033\ufe0f\u20e3",
        4: "\u0034\ufe0f\u20e3",
        5: "\u0035\ufe0f\u20e3",
        6: "\u0036\ufe0f\u20e3",
        7: "\u0037\ufe0f\u20e3",
        8: "\u0038\ufe0f\u20e3",
        9: "\u0039\ufe0f\u20e3"
    }

    confirmation = "\u2705"
    decline = "\u274c"
    incident_unactioned = "<:incident_unactioned:719645583245180960>"

    # Tic-tac-toe pieces.
    x = "\U0001f1fd"
    o = "\U0001f1f4"
    x_square = "<:x_square:632278427260682281>"
    o_square = "<:o_square:632278452413661214>"

    status_online = "<:status_online:470326272351010816>"
    status_idle = "<:status_idle:470326266625785866>"
    status_dnd = "<:status_dnd:470326272082313216>"
    status_offline = "<:status_offline:470326266537705472>"

    # Reddit emojis
    reddit = "<:reddit:676030265734332427>"
    reddit_post_text = "<:reddit_post_text:676030265910493204>"
    reddit_post_video = "<:reddit_post_video:676030265839190047>"
    reddit_post_photo = "<:reddit_post_photo:676030265734201344>"
    reddit_upvote = "<:reddit_upvote:755845219890757644>"
    reddit_comments = "<:reddit_comments:755845255001014384>"
    reddit_users = "<:reddit_users:755845303822974997>"
class Icons:
    """Image URLs used as embed icons."""

    questionmark = "https://cdn.discordapp.com/emojis/512367613339369475.png"
    bookmark = (
        "https://images-ext-2.discordapp.net/external/zl4oDwcmxUILY7sD9ZWE2fU5R7n6QcxEmPYSE5eddbg/"
        "%3Fv%3D1/https/cdn.discordapp.com/emojis/654080405988966419.png?width=20&height=20"
    )
class Lovefest:
    """Settings for the Lovefest seasonal event."""

    role_id = int(environ.get("LOVEFEST_ROLE_ID", 542431903886606399))
class Month(enum.IntEnum):
    """Months of the year as an IntEnum (January == 1)."""

    JANUARY = 1
    FEBRUARY = 2
    MARCH = 3
    APRIL = 4
    MAY = 5
    JUNE = 6
    JULY = 7
    AUGUST = 8
    SEPTEMBER = 9
    OCTOBER = 10
    NOVEMBER = 11
    DECEMBER = 12

    def __str__(self) -> str:
        """Return the month name in title case, e.g. 'January'."""
        return self.name.title()
# If a month override was configured, validate it eagerly at import time:
# Month(...) raises ValueError for anything outside 1-12, which is better
# than delaying the exception until after the bot has started.
if Client.month_override is not None:
    Month(Client.month_override)
class Roles(NamedTuple):
    """Discord role IDs referenced by the bot."""

    admin = int(environ.get("BOT_ADMIN_ROLE_ID", 267628507062992896))
    moderator = 267629731250176001
    owner = 267627879762755584
    helpers = int(environ.get("ROLE_HELPERS", 267630620367257601))
    core_developers = 587606783669829632
class Tokens(NamedTuple):
    """Third-party API tokens/keys, all read from the environment."""

    giphy = environ.get("GIPHY_TOKEN")
    aoc_session_cookie = environ.get("AOC_SESSION_COOKIE")
    omdb = environ.get("OMDB_API_KEY")
    youtube = environ.get("YOUTUBE_API_KEY")
    tmdb = environ.get("TMDB_API_KEY")
    nasa = environ.get("NASA_API_KEY")
    igdb_client_id = environ.get("IGDB_CLIENT_ID")
    igdb_client_secret = environ.get("IGDB_CLIENT_SECRET")
    github = environ.get("GITHUB_TOKEN")
    unsplash_access_key = environ.get("UNSPLASH_KEY")
class Wolfram(NamedTuple):
    """Wolfram|Alpha API key and per-day usage limits."""

    user_limit_day = int(environ.get("WOLFRAM_USER_LIMIT_DAY", 10))
    guild_limit_day = int(environ.get("WOLFRAM_GUILD_LIMIT_DAY", 67))
    key = environ.get("WOLFRAM_API_KEY")
class RedisConfig(NamedTuple):
    """Redis connection settings (fakeredis can be enabled for local dev)."""

    host = environ.get("REDIS_HOST", "redis.default.svc.cluster.local")
    port = environ.get("REDIS_PORT", 6379)
    password = environ.get("REDIS_PASSWORD")
    use_fakeredis = environ.get("USE_FAKEREDIS", "false").lower() == "true"
class Source:
    """Links to the bot's own source repository."""

    github = "https://github.com/python-discord/sir-lancebot"
    github_avatar_url = "https://avatars1.githubusercontent.com/u/9919"
class RedirectOutput:
    """Settings for redirected command output."""

    # Seconds before the redirected message is deleted.
    delete_delay: int = 10
class Reddit:
    """Reddit API credentials and the subreddits/webhook the bot uses."""

    subreddits = ["r/Python"]
    client_id = environ.get("REDDIT_CLIENT_ID")
    secret = environ.get("REDDIT_SECRET")
    webhook = int(environ.get("REDDIT_WEBHOOK", 635408384794951680))
# Default role combinations
MODERATION_ROLES = Roles.moderator, Roles.admin, Roles.owner
STAFF_ROLES = Roles.helpers, Roles.moderator, Roles.admin, Roles.owner

# Whitelisted channels
WHITELISTED_CHANNELS = (
    Channels.bot,
    Channels.community_bot_commands,
    Channels.off_topic_0,
    Channels.off_topic_1,
    Channels.off_topic_2,
    Channels.voice_chat_0,
    Channels.voice_chat_1,
)

# Git commit the bot was built from; "foobar" when not provided.
GIT_SHA = environ.get("GIT_SHA", "foobar")

# Bot replies
ERROR_REPLIES = [
    "Please don't do that.",
    "You have to stop.",
    "Do you mind?",
    "In the future, don't do that.",
    "That was a mistake.",
    "You blew it.",
    "You're bad at computers.",
    "Are you trying to kill me?",
    "Noooooo!!",
    "I can't believe you've done this",
]

NEGATIVE_REPLIES = [
    "Noooooo!!",
    "Nope.",
    "I'm sorry Dave, I'm afraid I can't do that.",
    "I don't think so.",
    "Not gonna happen.",
    "Out of the question.",
    "Huh? No.",
    "Nah.",
    "Naw.",
    "Not likely.",
    "No way, José.",
    "Not in a million years.",
    "Fat chance.",
    "Certainly not.",
    "NEGATORY.",
    "Nuh-uh.",
    "Not in my house!",
]

POSITIVE_REPLIES = [
    "Yep.",
    "Absolutely!",
    "Can do!",
    "Affirmative!",
    "Yeah okay.",
    "Sure.",
    "Sure thing!",
    "You're the boss!",
    "Okay.",
    "No problem.",
    "I got you.",
    "Alright.",
    "You got it!",
    "ROGER THAT",
    "Of course!",
    "Aye aye, cap'n!",
    "I'll allow it.",
]
| 28.799492 | 111 | 0.67216 | import dataclasses
import enum
import logging
from datetime import datetime
from os import environ
from typing import Dict, NamedTuple
__all__ = (
"AdventOfCode",
"Branding",
"Cats",
"Channels",
"Categories",
"Client",
"Colours",
"Emojis",
"Icons",
"Lovefest",
"Month",
"Roles",
"Tokens",
"Wolfram",
"Reddit",
"RedisConfig",
"RedirectOutput",
"MODERATION_ROLES",
"STAFF_ROLES",
"WHITELISTED_CHANNELS",
"ERROR_REPLIES",
"NEGATIVE_REPLIES",
"POSITIVE_REPLIES",
)
log = logging.getLogger(__name__)
@dataclasses.dataclass
class AdventOfCodeLeaderboard:
id: str
_session: str
join_code: str
# fallback session to make sure the commands still work.
use_fallback_session: bool = False
@property
def session(self) -> str:
if self.use_fallback_session:
log.info(f"Returning fallback cookie for board `{self.id}`.")
return AdventOfCode.fallback_session
return self._session
def _parse_aoc_leaderboard_env() -> Dict[str, AdventOfCodeLeaderboard]:
raw_leaderboards = environ.get("AOC_LEADERBOARDS", "")
if not raw_leaderboards:
return {}
leaderboards = {}
for leaderboard in raw_leaderboards.split("::"):
leaderboard_id, session, join_code = leaderboard.split(",")
leaderboards[leaderboard_id] = AdventOfCodeLeaderboard(leaderboard_id, session, join_code)
return leaderboards
class AdventOfCode:
# Information for the several leaderboards we have
leaderboards = _parse_aoc_leaderboard_env()
staff_leaderboard_id = environ.get("AOC_STAFF_LEADERBOARD_ID", "")
fallback_session = environ.get("AOC_FALLBACK_SESSION", "")
# Other Advent of Code constants
ignored_days = environ.get("AOC_IGNORED_DAYS", "").split(",")
leaderboard_displayed_members = 10
leaderboard_cache_expiry_seconds = 1800
year = int(environ.get("AOC_YEAR", datetime.utcnow().year))
role_id = int(environ.get("AOC_ROLE_ID", 518565788744024082))
class Branding:
cycle_frequency = int(environ.get("CYCLE_FREQUENCY", 3)) # 0: never, 1: every day, 2: every other day, ...
class Cats:
cats = ["ᓚᘏᗢ", "ᘡᘏᗢ", "🐈", "ᓕᘏᗢ", "ᓇᘏᗢ", "ᓂᘏᗢ", "ᘣᘏᗢ", "ᕦᘏᗢ", "ᕂᘏᗢ"]
class Channels(NamedTuple):
advent_of_code = int(environ.get("AOC_CHANNEL_ID", 782715290437943306))
advent_of_code_commands = int(environ.get("AOC_COMMANDS_CHANNEL_ID", 607247579608121354))
bot = 267659945086812160
organisation = 551789653284356126
devlog = int(environ.get("CHANNEL_DEVLOG", 622895325144940554))
dev_contrib = 635950537262759947
mod_meta = 775412552795947058
mod_tools = 775413915391098921
off_topic_0 = 291284109232308226
off_topic_1 = 463035241142026251
off_topic_2 = 463035268514185226
community_bot_commands = int(environ.get("CHANNEL_COMMUNITY_BOT_COMMANDS", 607247579608121354))
hacktoberfest_2020 = 760857070781071431
voice_chat_0 = 412357430186344448
voice_chat_1 = 799647045886541885
staff_voice = 541638762007101470
reddit = int(environ.get("CHANNEL_REDDIT", 458224812528238616))
class Categories(NamedTuple):
help_in_use = 696958401460043776
development = 411199786025484308
devprojects = 787641585624940544
media = 799054581991997460
staff = 364918151625965579
class Client(NamedTuple):
name = "Sir Lancebot"
guild = int(environ.get("BOT_GUILD", 267624335836053506))
prefix = environ.get("PREFIX", ".")
token = environ.get("BOT_TOKEN")
sentry_dsn = environ.get("BOT_SENTRY_DSN")
debug = environ.get("BOT_DEBUG", "").lower() == "true"
github_bot_repo = "https://github.com/python-discord/sir-lancebot"
# Override seasonal locks: 1 (January) to 12 (December)
month_override = int(environ["MONTH_OVERRIDE"]) if "MONTH_OVERRIDE" in environ else None
class Colours:
blue = 0x0279FD
bright_green = 0x01D277
dark_green = 0x1F8B4C
orange = 0xE67E22
pink = 0xCF84E0
purple = 0xB734EB
soft_green = 0x68C290
soft_orange = 0xF9CB54
soft_red = 0xCD6D6D
yellow = 0xF9F586
python_blue = 0x4B8BBE
python_yellow = 0xFFD43B
grass_green = 0x66FF00
gold = 0xE6C200
easter_like_colours = [
(255, 247, 0),
(255, 255, 224),
(0, 255, 127),
(189, 252, 201),
(255, 192, 203),
(255, 160, 122),
(181, 115, 220),
(221, 160, 221),
(200, 162, 200),
(238, 130, 238),
(135, 206, 235),
(0, 204, 204),
(64, 224, 208),
]
class Emojis:
cross_mark = "\u274C"
star = "\u2B50"
christmas_tree = "\U0001F384"
check = "\u2611"
envelope = "\U0001F4E8"
trashcan = environ.get("TRASHCAN_EMOJI", "<:trashcan:637136429717389331>")
ok_hand = ":ok_hand:"
hand_raised = "\U0001F64B"
dice_1 = "<:dice_1:755891608859443290>"
dice_2 = "<:dice_2:755891608741740635>"
dice_3 = "<:dice_3:755891608251138158>"
dice_4 = "<:dice_4:755891607882039327>"
dice_5 = "<:dice_5:755891608091885627>"
dice_6 = "<:dice_6:755891607680843838>"
issue = "<:IssueOpen:629695470327037963>"
issue_closed = "<:IssueClosed:629695470570307614>"
pull_request = "<:PROpen:629695470175780875>"
pull_request_closed = "<:PRClosed:629695470519713818>"
pull_request_draft = "<:PRDraft:829755345425399848>"
merge = "<:PRMerged:629695470570176522>"
number_emojis = {
1: "\u0031\ufe0f\u20e3",
2: "\u0032\ufe0f\u20e3",
3: "\u0033\ufe0f\u20e3",
4: "\u0034\ufe0f\u20e3",
5: "\u0035\ufe0f\u20e3",
6: "\u0036\ufe0f\u20e3",
7: "\u0037\ufe0f\u20e3",
8: "\u0038\ufe0f\u20e3",
9: "\u0039\ufe0f\u20e3"
}
confirmation = "\u2705"
decline = "\u274c"
incident_unactioned = "<:incident_unactioned:719645583245180960>"
x = "\U0001f1fd"
o = "\U0001f1f4"
x_square = "<:x_square:632278427260682281>"
o_square = "<:o_square:632278452413661214>"
status_online = "<:status_online:470326272351010816>"
status_idle = "<:status_idle:470326266625785866>"
status_dnd = "<:status_dnd:470326272082313216>"
status_offline = "<:status_offline:470326266537705472>"
# Reddit emojis
reddit = "<:reddit:676030265734332427>"
reddit_post_text = "<:reddit_post_text:676030265910493204>"
reddit_post_video = "<:reddit_post_video:676030265839190047>"
reddit_post_photo = "<:reddit_post_photo:676030265734201344>"
reddit_upvote = "<:reddit_upvote:755845219890757644>"
reddit_comments = "<:reddit_comments:755845255001014384>"
reddit_users = "<:reddit_users:755845303822974997>"
class Icons:
questionmark = "https://cdn.discordapp.com/emojis/512367613339369475.png"
bookmark = (
"https://images-ext-2.discordapp.net/external/zl4oDwcmxUILY7sD9ZWE2fU5R7n6QcxEmPYSE5eddbg/"
"%3Fv%3D1/https/cdn.discordapp.com/emojis/654080405988966419.png?width=20&height=20"
)
class Lovefest:
role_id = int(environ.get("LOVEFEST_ROLE_ID", 542431903886606399))
class Month(enum.IntEnum):
    """The twelve calendar months, numbered 1 (January) through 12 (December)."""

    JANUARY = 1
    FEBRUARY = 2
    MARCH = 3
    APRIL = 4
    MAY = 5
    JUNE = 6
    JULY = 7
    AUGUST = 8
    SEPTEMBER = 9
    OCTOBER = 10
    NOVEMBER = 11
    DECEMBER = 12

    def __str__(self) -> str:
        """Return the human-readable month name, e.g. ``"January"``."""
        month_name = self.name
        return month_name.title()
# If a month override was configured, check that it's a valid Month.
# Constructing the enum with an invalid value makes Month(...) raise
# ValueError here, failing fast at import time instead of at first use.
if Client.month_override is not None:
    Month(Client.month_override)
class Roles(NamedTuple):
    """Discord role IDs used for permission checks."""

    # NOTE(review): none of these attributes is annotated, so NamedTuple
    # creates no tuple fields -- the class behaves as a plain constant holder.
    # Confirm callers only use attribute access before changing the base class.
    admin = int(environ.get("BOT_ADMIN_ROLE_ID", 267628507062992896))
    moderator = 267629731250176001
    owner = 267627879762755584
    helpers = int(environ.get("ROLE_HELPERS", 267630620367257601))
    core_developers = 587606783669829632
class Tokens(NamedTuple):
    """Third-party API credentials, all read from the environment.

    Any value may be None when the corresponding variable is unset.
    """

    giphy = environ.get("GIPHY_TOKEN")
    aoc_session_cookie = environ.get("AOC_SESSION_COOKIE")
    omdb = environ.get("OMDB_API_KEY")
    youtube = environ.get("YOUTUBE_API_KEY")
    tmdb = environ.get("TMDB_API_KEY")
    nasa = environ.get("NASA_API_KEY")
    igdb_client_id = environ.get("IGDB_CLIENT_ID")
    igdb_client_secret = environ.get("IGDB_CLIENT_SECRET")
    github = environ.get("GITHUB_TOKEN")
    unsplash_access_key = environ.get("UNSPLASH_KEY")
class Wolfram(NamedTuple):
    """Wolfram|Alpha API key and per-day request budgets."""

    # Daily request limits; overridable via environment variables.
    user_limit_day = int(environ.get("WOLFRAM_USER_LIMIT_DAY", "10"))
    guild_limit_day = int(environ.get("WOLFRAM_GUILD_LIMIT_DAY", "67"))
    # May be None when WOLFRAM_API_KEY is unset.
    key = environ.get("WOLFRAM_API_KEY")
class RedisConfig(NamedTuple):
    """Connection settings for the Redis backend."""

    host = environ.get("REDIS_HOST", "redis.default.svc.cluster.local")
    # NOTE(review): when REDIS_PORT is set this stays a string, while the
    # default is the int 6379 -- confirm consumers coerce it consistently.
    port = environ.get("REDIS_PORT", 6379)
    password = environ.get("REDIS_PASSWORD")
    # Presumably switches to an in-memory fake for local development --
    # verify against the bot's Redis session setup.
    use_fakeredis = environ.get("USE_FAKEREDIS", "false").lower() == "true"
class Source:
    """Links to this bot's own source repository."""

    github = "https://github.com/python-discord/sir-lancebot"
    github_avatar_url = "https://avatars1.githubusercontent.com/u/9919"
class RedirectOutput:
    """Settings for redirected command output."""

    # Delay before a redirected message is deleted; presumably seconds --
    # TODO confirm units against the code that consumes this value.
    delete_delay: int = 10
class Reddit:
    """Reddit API credentials and the subreddits the bot follows."""

    subreddits = ["r/Python"]
    client_id = environ.get("REDDIT_CLIENT_ID")
    secret = environ.get("REDDIT_SECRET")
    # Presumably the Discord webhook ID used to relay Reddit content --
    # verify against the cog that posts via this webhook.
    webhook = int(environ.get("REDDIT_WEBHOOK", 635408384794951680))
# Role groups used for permission checks (tuples built without parentheses).
MODERATION_ROLES = Roles.moderator, Roles.admin, Roles.owner
STAFF_ROLES = Roles.helpers, Roles.moderator, Roles.admin, Roles.owner
# Channels where bot commands are allowed -- presumably consumed by a
# channel-whitelist decorator; verify against the checks module.
WHITELISTED_CHANNELS = (
    Channels.bot,
    Channels.community_bot_commands,
    Channels.off_topic_0,
    Channels.off_topic_1,
    Channels.off_topic_2,
    Channels.voice_chat_0,
    Channels.voice_chat_1,
)
# Git commit SHA injected at deploy time; "foobar" is the local-dev fallback.
GIT_SHA = environ.get("GIT_SHA", "foobar")
# Humorous reply strings for when a command invocation errors out.
ERROR_REPLIES = [
    "Please don't do that.",
    "You have to stop.",
    "Do you mind?",
    "In the future, don't do that.",
    "That was a mistake.",
    "You blew it.",
    "You're bad at computers.",
    "Are you trying to kill me?",
    "Noooooo!!",
    "I can't believe you've done this",
]
# Reply strings for declining/refusing a request.
NEGATIVE_REPLIES = [
    "Noooooo!!",
    "Nope.",
    "I'm sorry Dave, I'm afraid I can't do that.",
    "I don't think so.",
    "Not gonna happen.",
    "Out of the question.",
    "Huh? No.",
    "Nah.",
    "Naw.",
    "Not likely.",
    "No way, José.",
    "Not in a million years.",
    "Fat chance.",
    "Certainly not.",
    "NEGATORY.",
    "Nuh-uh.",
    "Not in my house!",
]
# Reply strings for confirming/accepting a request.
POSITIVE_REPLIES = [
    "Yep.",
    "Absolutely!",
    "Can do!",
    "Affirmative!",
    "Yeah okay.",
    "Sure.",
    "Sure thing!",
    "You're the boss!",
    "Okay.",
    "No problem.",
    "I got you.",
    "Alright.",
    "You got it!",
    "ROGER THAT",
    "Of course!",
    "Aye aye, cap'n!",
    "I'll allow it.",
]
| true | true |
f7fe21e0482fd7a77646c12d7c6b49b43588b691 | 21,732 | py | Python | tests/test_modeling_gpt2.py | katarinaslama/transformers-1 | a5a8eeb772b185b0746f3ce9be6ae43181d2ca71 | [
"Apache-2.0"
] | 12 | 2021-06-05T03:51:23.000Z | 2022-03-05T05:09:41.000Z | tests/test_modeling_gpt2.py | katarinaslama/transformers-1 | a5a8eeb772b185b0746f3ce9be6ae43181d2ca71 | [
"Apache-2.0"
] | 1 | 2021-10-20T02:25:36.000Z | 2021-10-20T02:25:36.000Z | tests/test_modeling_gpt2.py | katarinaslama/transformers-1 | a5a8eeb772b185b0746f3ce9be6ae43181d2ca71 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_torch_available():
import torch
from transformers import (
GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
GPT2Config,
GPT2DoubleHeadsModel,
GPT2ForSequenceClassification,
GPT2LMHeadModel,
GPT2Model,
GPT2Tokenizer,
)
class GPT2ModelTester:
    """Builds tiny GPT-2 configs and random inputs, and runs shape/consistency
    checks for the GPT-2 model classes.

    Assertions are reported through ``self.parent`` (the ``unittest.TestCase``
    that owns this tester).

    Fix: removed a leftover debug ``print(...)`` from
    ``create_and_check_gpt2_for_sequence_classification``.
    """

    def __init__(
        self,
        parent,
        batch_size=14,
        seq_length=7,
        is_training=True,
        use_token_type_ids=True,
        use_input_mask=True,
        use_labels=True,
        use_mc_token_ids=True,
        vocab_size=99,
        hidden_size=32,
        num_hidden_layers=5,
        num_attention_heads=4,
        intermediate_size=37,
        hidden_act="gelu",
        hidden_dropout_prob=0.1,
        attention_probs_dropout_prob=0.1,
        max_position_embeddings=512,
        type_vocab_size=16,
        type_sequence_label_size=2,
        initializer_range=0.02,
        num_labels=3,
        num_choices=4,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.seq_length = seq_length
        self.is_training = is_training
        self.use_token_type_ids = use_token_type_ids
        self.use_input_mask = use_input_mask
        self.use_labels = use_labels
        self.use_mc_token_ids = use_mc_token_ids
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.type_sequence_label_size = type_sequence_label_size
        self.initializer_range = initializer_range
        self.num_labels = num_labels
        self.num_choices = num_choices
        # NOTE(review): the ``scope`` argument is ignored; ``self.scope`` is
        # always None regardless of what the caller passes.
        self.scope = None
        # bos/eos/pad all share the last vocabulary id.
        self.bos_token_id = vocab_size - 1
        self.eos_token_id = vocab_size - 1
        self.pad_token_id = vocab_size - 1

    def prepare_config_and_inputs(self, gradient_checkpointing=False):
        """Create a small GPT2Config plus random ids/masks/labels for one test case."""
        input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
        input_mask = None
        if self.use_input_mask:
            input_mask = random_attention_mask([self.batch_size, self.seq_length])
        token_type_ids = None
        if self.use_token_type_ids:
            token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)
        mc_token_ids = None
        if self.use_mc_token_ids:
            mc_token_ids = ids_tensor([self.batch_size, self.num_choices], self.seq_length)
        sequence_labels = None
        token_labels = None
        choice_labels = None
        if self.use_labels:
            sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
            token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
            choice_labels = ids_tensor([self.batch_size], self.num_choices)
        config = GPT2Config(
            vocab_size=self.vocab_size,
            n_embd=self.hidden_size,
            n_layer=self.num_hidden_layers,
            n_head=self.num_attention_heads,
            # intermediate_size=self.intermediate_size,
            # hidden_act=self.hidden_act,
            # hidden_dropout_prob=self.hidden_dropout_prob,
            # attention_probs_dropout_prob=self.attention_probs_dropout_prob,
            n_positions=self.max_position_embeddings,
            n_ctx=self.max_position_embeddings,
            # type_vocab_size=self.type_vocab_size,
            # initializer_range=self.initializer_range,
            bos_token_id=self.bos_token_id,
            eos_token_id=self.eos_token_id,
            pad_token_id=self.pad_token_id,
            return_dict=True,
            gradient_checkpointing=gradient_checkpointing,
        )
        head_mask = ids_tensor([self.num_hidden_layers, self.num_attention_heads], 2)
        return (
            config,
            input_ids,
            input_mask,
            head_mask,
            token_type_ids,
            mc_token_ids,
            sequence_labels,
            token_labels,
            choice_labels,
        )

    def prepare_config_and_inputs_for_decoder(self):
        """Like prepare_config_and_inputs, plus encoder hidden states / mask for cross-attention tests."""
        (
            config,
            input_ids,
            input_mask,
            head_mask,
            token_type_ids,
            mc_token_ids,
            sequence_labels,
            token_labels,
            choice_labels,
        ) = self.prepare_config_and_inputs()
        encoder_hidden_states = floats_tensor([self.batch_size, self.seq_length, self.hidden_size])
        encoder_attention_mask = ids_tensor([self.batch_size, self.seq_length], vocab_size=2)
        return (
            config,
            input_ids,
            input_mask,
            head_mask,
            token_type_ids,
            sequence_labels,
            token_labels,
            choice_labels,
            encoder_hidden_states,
            encoder_attention_mask,
        )

    def create_and_check_gpt2_model(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
        """Check output shapes of the bare GPT2Model under several input combinations."""
        model = GPT2Model(config=config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, token_type_ids=token_type_ids, head_mask=head_mask)
        result = model(input_ids, token_type_ids=token_type_ids)
        result = model(input_ids)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
        self.parent.assertEqual(len(result.past_key_values), config.n_layer)

    def create_and_check_gpt2_model_past(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
        """Check that cached past key/values give the same hidden states as a full forward pass."""
        model = GPT2Model(config=config)
        model.to(torch_device)
        model.eval()
        # first forward pass
        outputs = model(input_ids, token_type_ids=token_type_ids, use_cache=True)
        outputs_use_cache_conf = model(input_ids, token_type_ids=token_type_ids)
        outputs_no_past = model(input_ids, token_type_ids=token_type_ids, use_cache=False)
        self.parent.assertTrue(len(outputs) == len(outputs_use_cache_conf))
        self.parent.assertTrue(len(outputs) == len(outputs_no_past) + 1)
        output, past = outputs.to_tuple()
        # create hypothetical next token and extent to next_input_ids
        next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
        next_token_types = ids_tensor([self.batch_size, 1], self.type_vocab_size)
        # append to next input_ids and token_type_ids
        next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
        next_token_type_ids = torch.cat([token_type_ids, next_token_types], dim=-1)
        output_from_no_past = model(next_input_ids, token_type_ids=next_token_type_ids)["last_hidden_state"]
        output_from_past = model(next_tokens, token_type_ids=next_token_types, past=past)["last_hidden_state"]
        # select random slice
        random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
        output_from_no_past_slice = output_from_no_past[:, -1, random_slice_idx].detach()
        output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
        # test that outputs are equal for slice
        self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))

    def create_and_check_gpt2_model_attention_mask_past(
        self, config, input_ids, input_mask, head_mask, token_type_ids, *args
    ):
        """Check past-cache consistency when part of the input is masked out."""
        model = GPT2Model(config=config)
        model.to(torch_device)
        model.eval()
        # create attention mask
        attn_mask = torch.ones(input_ids.shape, dtype=torch.long, device=torch_device)
        half_seq_length = self.seq_length // 2
        attn_mask[:, half_seq_length:] = 0
        # first forward pass
        output, past = model(input_ids, attention_mask=attn_mask).to_tuple()
        # create hypothetical next token and extent to next_input_ids
        next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
        # change a random masked slice from input_ids
        random_seq_idx_to_change = ids_tensor((1,), half_seq_length).item() + 1
        random_other_next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size).squeeze(-1)
        input_ids[:, -random_seq_idx_to_change] = random_other_next_tokens
        # append to next input_ids and attn_mask
        next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
        attn_mask = torch.cat(
            [attn_mask, torch.ones((attn_mask.shape[0], 1), dtype=torch.long, device=torch_device)],
            dim=1,
        )
        # get two different outputs
        output_from_no_past = model(next_input_ids, attention_mask=attn_mask)["last_hidden_state"]
        output_from_past = model(next_tokens, past=past, attention_mask=attn_mask)["last_hidden_state"]
        # select random slice
        random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
        output_from_no_past_slice = output_from_no_past[:, -1, random_slice_idx].detach()
        output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
        # test that outputs are equal for slice
        self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))

    def create_and_check_gpt2_model_past_large_inputs(
        self, config, input_ids, input_mask, head_mask, token_type_ids, *args
    ):
        """Check past-cache consistency when several new tokens are appended at once."""
        model = GPT2Model(config=config)
        model.to(torch_device)
        model.eval()
        # first forward pass
        outputs = model(input_ids, token_type_ids=token_type_ids, use_cache=True)
        output, past = outputs.to_tuple()
        # create hypothetical next token and extent to next_input_ids
        next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size)
        next_token_types = ids_tensor([self.batch_size, 3], self.type_vocab_size)
        # append to next input_ids and token_type_ids
        next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
        next_token_type_ids = torch.cat([token_type_ids, next_token_types], dim=-1)
        output_from_no_past = model(next_input_ids, token_type_ids=next_token_type_ids)["last_hidden_state"]
        output_from_past = model(next_tokens, token_type_ids=next_token_types, past=past)["last_hidden_state"]
        self.parent.assertTrue(output_from_past.shape[1] == next_tokens.shape[1])
        # select random slice
        random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
        output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx].detach()
        output_from_past_slice = output_from_past[:, :, random_slice_idx].detach()
        # test that outputs are equal for slice
        self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))

    def create_and_check_lm_head_model(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
        """Check loss/logits shapes of GPT2LMHeadModel with labels == input_ids."""
        model = GPT2LMHeadModel(config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, token_type_ids=token_type_ids, labels=input_ids)
        self.parent.assertEqual(result.loss.shape, ())
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))

    def create_and_check_forward_and_backwards(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
        """Check that a forward pass and a backward pass both run (used for gradient checkpointing)."""
        model = GPT2LMHeadModel(config)
        model.to(torch_device)
        result = model(input_ids, token_type_ids=token_type_ids, labels=input_ids)
        self.parent.assertEqual(result.loss.shape, ())
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
        result.loss.backward()

    def create_and_check_double_lm_head_model(
        self, config, input_ids, input_mask, head_mask, token_type_ids, mc_token_ids, *args
    ):
        """Check GPT2DoubleHeadsModel shapes on multiple-choice-expanded inputs."""
        model = GPT2DoubleHeadsModel(config)
        model.to(torch_device)
        model.eval()
        # Expand each input along a num_choices dimension.
        multiple_choice_inputs_ids = input_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
        multiple_choice_input_mask = input_mask.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
        multiple_choice_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
        inputs = {
            "input_ids": multiple_choice_inputs_ids,
            "mc_token_ids": mc_token_ids,
            "attention_mask": multiple_choice_input_mask,
            "token_type_ids": multiple_choice_token_type_ids,
            "labels": multiple_choice_inputs_ids,
        }
        result = model(**inputs)
        self.parent.assertEqual(result.loss.shape, ())
        self.parent.assertEqual(
            result.logits.shape, (self.batch_size, self.num_choices, self.seq_length, self.vocab_size)
        )
        self.parent.assertEqual(result.mc_logits.shape, (self.batch_size, self.num_choices))

    def create_and_check_gpt2_for_sequence_classification(
        self, config, input_ids, input_mask, head_mask, token_type_ids, mc_token_ids, sequence_labels, *args
    ):
        """Check GPT2ForSequenceClassification logits shape with sequence labels."""
        config.num_labels = self.num_labels
        model = GPT2ForSequenceClassification(config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=sequence_labels)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))

    def prepare_config_and_inputs_for_common(self):
        """Return (config, inputs_dict) in the shape expected by ModelTesterMixin."""
        config_and_inputs = self.prepare_config_and_inputs()
        (
            config,
            input_ids,
            input_mask,
            head_mask,
            token_type_ids,
            mc_token_ids,
            sequence_labels,
            token_labels,
            choice_labels,
        ) = config_and_inputs
        inputs_dict = {
            "input_ids": input_ids,
            "token_type_ids": token_type_ids,
            "head_mask": head_mask,
        }
        return config, inputs_dict
@require_torch
class GPT2ModelTest(ModelTesterMixin, unittest.TestCase):
    """Common and GPT-2-specific model tests, delegating checks to GPT2ModelTester."""

    all_model_classes = (
        (GPT2Model, GPT2LMHeadModel, GPT2DoubleHeadsModel, GPT2ForSequenceClassification)
        if is_torch_available()
        else ()
    )
    all_generative_model_classes = (GPT2LMHeadModel, GPT2DoubleHeadsModel) if is_torch_available() else ()
    test_missing_keys = False
    def setUp(self):
        self.model_tester = GPT2ModelTester(self)
        self.config_tester = ConfigTester(self, config_class=GPT2Config, n_embd=37)
    def test_config(self):
        self.config_tester.run_common_tests()
    def test_gpt2_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_gpt2_model(*config_and_inputs)
    def test_gpt2_model_past(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_gpt2_model_past(*config_and_inputs)
    def test_gpt2_model_att_mask_past(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_gpt2_model_attention_mask_past(*config_and_inputs)
    def test_gpt2_model_past_large_inputs(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_gpt2_model_past_large_inputs(*config_and_inputs)
    def test_gpt2_lm_head_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_lm_head_model(*config_and_inputs)
    def test_gpt2_double_lm_head_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_double_lm_head_model(*config_and_inputs)
    def test_gpt2_sequence_classification_model(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_gpt2_for_sequence_classification(*config_and_inputs)
    def test_gpt2_gradient_checkpointing(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs(gradient_checkpointing=True)
        self.model_tester.create_and_check_forward_and_backwards(*config_and_inputs)
    @slow
    def test_batch_generation(self):
        """Generation on a left-padded batch must match per-sentence generation."""
        model = GPT2LMHeadModel.from_pretrained("gpt2")
        model.to(torch_device)
        tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
        tokenizer.padding_side = "left"
        # Define PAD Token = EOS Token = 50256
        tokenizer.pad_token = tokenizer.eos_token
        model.config.pad_token_id = model.config.eos_token_id
        # use different length sentences to test batching
        sentences = [
            "Hello, my dog is a little",
            "Today, I",
        ]
        inputs = tokenizer(sentences, return_tensors="pt", padding=True)
        torch.manual_seed(0)
        outputs = model.generate(
            input_ids=inputs["input_ids"].to(torch_device),
            attention_mask=inputs["attention_mask"].to(torch_device),
        )
        inputs_non_padded = tokenizer(sentences[0], return_tensors="pt").input_ids.to(torch_device)
        output_non_padded = model.generate(input_ids=inputs_non_padded)
        # Shorten max_length by the padding so both runs generate the same number of new tokens.
        num_paddings = inputs_non_padded.shape[-1] - inputs["attention_mask"][-1].long().sum().cpu().item()
        inputs_padded = tokenizer(sentences[1], return_tensors="pt").input_ids.to(torch_device)
        output_padded = model.generate(input_ids=inputs_padded, max_length=model.config.max_length - num_paddings)
        batch_out_sentence = tokenizer.batch_decode(outputs, skip_special_tokens=True)
        non_padded_sentence = tokenizer.decode(output_non_padded[0], skip_special_tokens=True)
        padded_sentence = tokenizer.decode(output_padded[0], skip_special_tokens=True)
        expected_output_sentence = [
            "Hello, my dog is a little bit of a mess. I'm not sure if he's going",
            "Today, I'm going to be doing a lot of research on this. I",
        ]
        self.assertListEqual(expected_output_sentence, batch_out_sentence)
        self.assertListEqual(expected_output_sentence, [non_padded_sentence, padded_sentence])
    @slow
    def test_model_from_pretrained(self):
        for model_name in GPT2_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
            model = GPT2Model.from_pretrained(model_name)
            self.assertIsNotNone(model)
@require_torch
class GPT2ModelLanguageGenerationTest(unittest.TestCase):
    """Integration tests pinning greedy-decoding outputs of published GPT-2 checkpoints."""

    @slow
    def test_lm_generate_gpt2(self):
        # Run once with and once without gradient checkpointing; greedy output must match.
        for checkpointing in [True, False]:
            model = GPT2LMHeadModel.from_pretrained("gpt2", gradient_checkpointing=checkpointing)
            model.to(torch_device)
            input_ids = torch.tensor([[464, 3290]], dtype=torch.long, device=torch_device)  # The dog
            expected_output_ids = [
                464,
                3290,
                373,
                1043,
                287,
                257,
                2214,
                1474,
                262,
                16246,
                286,
                2688,
                290,
                2688,
                27262,
                13,
                198,
                198,
                464,
                3290,
            ]  # The dog was found in a field near the intersection of West and West Streets.\n\nThe dog
            output_ids = model.generate(input_ids, do_sample=False)
            self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
    @slow
    def test_lm_generate_distilgpt2(self):
        model = GPT2LMHeadModel.from_pretrained("distilgpt2")
        model.to(torch_device)
        input_ids = torch.tensor([[464, 1893]], dtype=torch.long, device=torch_device)  # The president
        expected_output_ids = [
            464,
            1893,
            286,
            262,
            1578,
            1829,
            11,
            290,
            262,
            1893,
            286,
            262,
            1578,
            7526,
            11,
            423,
            587,
            287,
            262,
            2635,
        ]  # The president of the United States, and the president of the United Kingdom, have been in the White
        output_ids = model.generate(input_ids, do_sample=False)
        self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
| 40.022099 | 118 | 0.675594 |
import unittest
from transformers import is_torch_available
from transformers.testing_utils import require_torch, slow, torch_device
from .test_configuration_common import ConfigTester
from .test_modeling_common import ModelTesterMixin, floats_tensor, ids_tensor, random_attention_mask
if is_torch_available():
import torch
from transformers import (
GPT2_PRETRAINED_MODEL_ARCHIVE_LIST,
GPT2Config,
GPT2DoubleHeadsModel,
GPT2ForSequenceClassification,
GPT2LMHeadModel,
GPT2Model,
GPT2Tokenizer,
)
class GPT2ModelTester:
def __init__(
self,
parent,
batch_size=14,
seq_length=7,
is_training=True,
use_token_type_ids=True,
use_input_mask=True,
use_labels=True,
use_mc_token_ids=True,
vocab_size=99,
hidden_size=32,
num_hidden_layers=5,
num_attention_heads=4,
intermediate_size=37,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=16,
type_sequence_label_size=2,
initializer_range=0.02,
num_labels=3,
num_choices=4,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.seq_length = seq_length
self.is_training = is_training
self.use_token_type_ids = use_token_type_ids
self.use_input_mask = use_input_mask
self.use_labels = use_labels
self.use_mc_token_ids = use_mc_token_ids
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.hidden_act = hidden_act
self.hidden_dropout_prob = hidden_dropout_prob
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.max_position_embeddings = max_position_embeddings
self.type_vocab_size = type_vocab_size
self.type_sequence_label_size = type_sequence_label_size
self.initializer_range = initializer_range
self.num_labels = num_labels
self.num_choices = num_choices
self.scope = None
self.bos_token_id = vocab_size - 1
self.eos_token_id = vocab_size - 1
self.pad_token_id = vocab_size - 1
def prepare_config_and_inputs(self, gradient_checkpointing=False):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
input_mask = None
if self.use_input_mask:
input_mask = random_attention_mask([self.batch_size, self.seq_length])
token_type_ids = None
if self.use_token_type_ids:
token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)
mc_token_ids = None
if self.use_mc_token_ids:
mc_token_ids = ids_tensor([self.batch_size, self.num_choices], self.seq_length)
sequence_labels = None
token_labels = None
choice_labels = None
if self.use_labels:
sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
choice_labels = ids_tensor([self.batch_size], self.num_choices)
config = GPT2Config(
vocab_size=self.vocab_size,
n_embd=self.hidden_size,
n_layer=self.num_hidden_layers,
n_head=self.num_attention_heads,
n_positions=self.max_position_embeddings,
n_ctx=self.max_position_embeddings,
bos_token_id=self.bos_token_id,
eos_token_id=self.eos_token_id,
pad_token_id=self.pad_token_id,
return_dict=True,
gradient_checkpointing=gradient_checkpointing,
)
head_mask = ids_tensor([self.num_hidden_layers, self.num_attention_heads], 2)
return (
config,
input_ids,
input_mask,
head_mask,
token_type_ids,
mc_token_ids,
sequence_labels,
token_labels,
choice_labels,
)
def prepare_config_and_inputs_for_decoder(self):
(
config,
input_ids,
input_mask,
head_mask,
token_type_ids,
mc_token_ids,
sequence_labels,
token_labels,
choice_labels,
) = self.prepare_config_and_inputs()
encoder_hidden_states = floats_tensor([self.batch_size, self.seq_length, self.hidden_size])
encoder_attention_mask = ids_tensor([self.batch_size, self.seq_length], vocab_size=2)
return (
config,
input_ids,
input_mask,
head_mask,
token_type_ids,
sequence_labels,
token_labels,
choice_labels,
encoder_hidden_states,
encoder_attention_mask,
)
def create_and_check_gpt2_model(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
model = GPT2Model(config=config)
model.to(torch_device)
model.eval()
result = model(input_ids, token_type_ids=token_type_ids, head_mask=head_mask)
result = model(input_ids, token_type_ids=token_type_ids)
result = model(input_ids)
self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
self.parent.assertEqual(len(result.past_key_values), config.n_layer)
def create_and_check_gpt2_model_past(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
model = GPT2Model(config=config)
model.to(torch_device)
model.eval()
outputs = model(input_ids, token_type_ids=token_type_ids, use_cache=True)
outputs_use_cache_conf = model(input_ids, token_type_ids=token_type_ids)
outputs_no_past = model(input_ids, token_type_ids=token_type_ids, use_cache=False)
self.parent.assertTrue(len(outputs) == len(outputs_use_cache_conf))
self.parent.assertTrue(len(outputs) == len(outputs_no_past) + 1)
output, past = outputs.to_tuple()
next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
next_token_types = ids_tensor([self.batch_size, 1], self.type_vocab_size)
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
next_token_type_ids = torch.cat([token_type_ids, next_token_types], dim=-1)
output_from_no_past = model(next_input_ids, token_type_ids=next_token_type_ids)["last_hidden_state"]
output_from_past = model(next_tokens, token_type_ids=next_token_types, past=past)["last_hidden_state"]
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, -1, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))
def create_and_check_gpt2_model_attention_mask_past(
self, config, input_ids, input_mask, head_mask, token_type_ids, *args
):
model = GPT2Model(config=config)
model.to(torch_device)
model.eval()
attn_mask = torch.ones(input_ids.shape, dtype=torch.long, device=torch_device)
half_seq_length = self.seq_length // 2
attn_mask[:, half_seq_length:] = 0
output, past = model(input_ids, attention_mask=attn_mask).to_tuple()
next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
random_seq_idx_to_change = ids_tensor((1,), half_seq_length).item() + 1
random_other_next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size).squeeze(-1)
input_ids[:, -random_seq_idx_to_change] = random_other_next_tokens
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
attn_mask = torch.cat(
[attn_mask, torch.ones((attn_mask.shape[0], 1), dtype=torch.long, device=torch_device)],
dim=1,
)
output_from_no_past = model(next_input_ids, attention_mask=attn_mask)["last_hidden_state"]
output_from_past = model(next_tokens, past=past, attention_mask=attn_mask)["last_hidden_state"]
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, -1, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))
def create_and_check_gpt2_model_past_large_inputs(
self, config, input_ids, input_mask, head_mask, token_type_ids, *args
):
model = GPT2Model(config=config)
model.to(torch_device)
model.eval()
outputs = model(input_ids, token_type_ids=token_type_ids, use_cache=True)
output, past = outputs.to_tuple()
next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size)
next_token_types = ids_tensor([self.batch_size, 3], self.type_vocab_size)
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
next_token_type_ids = torch.cat([token_type_ids, next_token_types], dim=-1)
output_from_no_past = model(next_input_ids, token_type_ids=next_token_type_ids)["last_hidden_state"]
output_from_past = model(next_tokens, token_type_ids=next_token_types, past=past)["last_hidden_state"]
self.parent.assertTrue(output_from_past.shape[1] == next_tokens.shape[1])
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, :, random_slice_idx].detach()
self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))
def create_and_check_lm_head_model(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
model = GPT2LMHeadModel(config)
model.to(torch_device)
model.eval()
result = model(input_ids, token_type_ids=token_type_ids, labels=input_ids)
self.parent.assertEqual(result.loss.shape, ())
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
def create_and_check_forward_and_backwards(self, config, input_ids, input_mask, head_mask, token_type_ids, *args):
model = GPT2LMHeadModel(config)
model.to(torch_device)
result = model(input_ids, token_type_ids=token_type_ids, labels=input_ids)
self.parent.assertEqual(result.loss.shape, ())
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
result.loss.backward()
def create_and_check_double_lm_head_model(
self, config, input_ids, input_mask, head_mask, token_type_ids, mc_token_ids, *args
):
model = GPT2DoubleHeadsModel(config)
model.to(torch_device)
model.eval()
multiple_choice_inputs_ids = input_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
multiple_choice_input_mask = input_mask.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
multiple_choice_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
inputs = {
"input_ids": multiple_choice_inputs_ids,
"mc_token_ids": mc_token_ids,
"attention_mask": multiple_choice_input_mask,
"token_type_ids": multiple_choice_token_type_ids,
"labels": multiple_choice_inputs_ids,
}
result = model(**inputs)
self.parent.assertEqual(result.loss.shape, ())
self.parent.assertEqual(
result.logits.shape, (self.batch_size, self.num_choices, self.seq_length, self.vocab_size)
)
self.parent.assertEqual(result.mc_logits.shape, (self.batch_size, self.num_choices))
def create_and_check_gpt2_for_sequence_classification(
self, config, input_ids, input_mask, head_mask, token_type_ids, mc_token_ids, sequence_labels, *args
):
config.num_labels = self.num_labels
model = GPT2ForSequenceClassification(config)
model.to(torch_device)
model.eval()
print(config.num_labels, sequence_labels.size())
result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=sequence_labels)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
(
config,
input_ids,
input_mask,
head_mask,
token_type_ids,
mc_token_ids,
sequence_labels,
token_labels,
choice_labels,
) = config_and_inputs
inputs_dict = {
"input_ids": input_ids,
"token_type_ids": token_type_ids,
"head_mask": head_mask,
}
return config, inputs_dict
@require_torch
class GPT2ModelTest(ModelTesterMixin, unittest.TestCase):
    """Wires the GPT-2 model family into the common model-test harness."""

    all_model_classes = (
        (GPT2Model, GPT2LMHeadModel, GPT2DoubleHeadsModel, GPT2ForSequenceClassification)
        if is_torch_available()
        else ()
    )
    all_generative_model_classes = (GPT2LMHeadModel, GPT2DoubleHeadsModel) if is_torch_available() else ()
    test_missing_keys = False

    def setUp(self):
        self.model_tester = GPT2ModelTester(self)
        self.config_tester = ConfigTester(self, config_class=GPT2Config, n_embd=37)

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_gpt2_model(self):
        self.model_tester.create_and_check_gpt2_model(*self.model_tester.prepare_config_and_inputs())

    def test_gpt2_model_past(self):
        self.model_tester.create_and_check_gpt2_model_past(*self.model_tester.prepare_config_and_inputs())

    def test_gpt2_model_att_mask_past(self):
        self.model_tester.create_and_check_gpt2_model_attention_mask_past(
            *self.model_tester.prepare_config_and_inputs()
        )

    def test_gpt2_model_past_large_inputs(self):
        self.model_tester.create_and_check_gpt2_model_past_large_inputs(
            *self.model_tester.prepare_config_and_inputs()
        )

    def test_gpt2_lm_head_model(self):
        self.model_tester.create_and_check_lm_head_model(*self.model_tester.prepare_config_and_inputs())

    def test_gpt2_double_lm_head_model(self):
        self.model_tester.create_and_check_double_lm_head_model(*self.model_tester.prepare_config_and_inputs())

    def test_gpt2_sequence_classification_model(self):
        self.model_tester.create_and_check_gpt2_for_sequence_classification(
            *self.model_tester.prepare_config_and_inputs()
        )

    def test_gpt2_gradient_checkpointing(self):
        self.model_tester.create_and_check_forward_and_backwards(
            *self.model_tester.prepare_config_and_inputs(gradient_checkpointing=True)
        )

    @slow
    def test_batch_generation(self):
        """Left-padded batched generation must reproduce the per-sentence greedy outputs."""
        model = GPT2LMHeadModel.from_pretrained("gpt2")
        model.to(torch_device)
        tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

        # GPT-2 has no pad token: reuse EOS and pad on the left so every prompt
        # ends at the same position before generation starts.
        tokenizer.padding_side = "left"
        tokenizer.pad_token = tokenizer.eos_token
        model.config.pad_token_id = model.config.eos_token_id

        sentences = [
            "Hello, my dog is a little",
            "Today, I",
        ]

        batch = tokenizer(sentences, return_tensors="pt", padding=True)

        torch.manual_seed(0)
        batch_output = model.generate(
            input_ids=batch["input_ids"].to(torch_device),
            attention_mask=batch["attention_mask"].to(torch_device),
        )

        # Reference run for the longer sentence, with no padding at all.
        unpadded_ids = tokenizer(sentences[0], return_tensors="pt").input_ids.to(torch_device)
        unpadded_output = model.generate(input_ids=unpadded_ids)

        # The shorter sentence is generated alone, but its length budget is
        # reduced by the number of pad tokens it received in the batch.
        pad_count = unpadded_ids.shape[-1] - batch["attention_mask"][-1].long().sum().cpu().item()
        padded_ids = tokenizer(sentences[1], return_tensors="pt").input_ids.to(torch_device)
        padded_output = model.generate(input_ids=padded_ids, max_length=model.config.max_length - pad_count)

        batch_out_sentence = tokenizer.batch_decode(batch_output, skip_special_tokens=True)
        non_padded_sentence = tokenizer.decode(unpadded_output[0], skip_special_tokens=True)
        padded_sentence = tokenizer.decode(padded_output[0], skip_special_tokens=True)

        expected_output_sentence = [
            "Hello, my dog is a little bit of a mess. I'm not sure if he's going",
            "Today, I'm going to be doing a lot of research on this. I",
        ]
        self.assertListEqual(expected_output_sentence, batch_out_sentence)
        self.assertListEqual(expected_output_sentence, [non_padded_sentence, padded_sentence])

    @slow
    def test_model_from_pretrained(self):
        for model_name in GPT2_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
            model = GPT2Model.from_pretrained(model_name)
            self.assertIsNotNone(model)
@require_torch
class GPT2ModelLanguageGenerationTest(unittest.TestCase):
    """Slow integration tests: greedy generation against known-good token sequences."""

    @slow
    def test_lm_generate_gpt2(self):
        # Generation must be identical with and without gradient checkpointing.
        for checkpointing in (True, False):
            model = GPT2LMHeadModel.from_pretrained("gpt2", gradient_checkpointing=checkpointing)
            model.to(torch_device)
            input_ids = torch.tensor([[464, 3290]], dtype=torch.long, device=torch_device)  # The dog
            # Decodes to: "The dog was found in a field near the intersection of
            # West and West Streets.\n\nThe dog"
            expected_output_ids = [
                464, 3290, 373, 1043, 287, 257, 2214, 1474, 262, 16246,
                286, 2688, 290, 2688, 27262, 13, 198, 198, 464, 3290,
            ]
            output_ids = model.generate(input_ids, do_sample=False)
            self.assertListEqual(output_ids[0].tolist(), expected_output_ids)

    @slow
    def test_lm_generate_distilgpt2(self):
        model = GPT2LMHeadModel.from_pretrained("distilgpt2")
        model.to(torch_device)
        input_ids = torch.tensor([[464, 1893]], dtype=torch.long, device=torch_device)  # The president
        # Decodes to: "The president of the United States, and the president of
        # the United Kingdom, have been in the White"
        expected_output_ids = [
            464, 1893, 286, 262, 1578, 1829, 11, 290, 262, 1893,
            286, 262, 1578, 7526, 11, 423, 587, 287, 262, 2635,
        ]
        output_ids = model.generate(input_ids, do_sample=False)
        self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.